From e15484ac6ce5e572290577d8ab9387b54e1ddd37 Mon Sep 17 00:00:00 2001
From: stack
Date: Mon, 25 Jul 2016 23:23:50 -0700
Subject: [PATCH] HBASE-15638 Shade protobuf

Shade protobuf from com.google.protobuf to org.apache.hadoop.hbase.shaded.com.google.protobuf. All protos are now in hbase-protocol because of previous commits. We do the shading of protobuf in this module only (a sketch of a shade-plugin relocation is appended at the end of this patch).

Most of the patch changes pb references to point at the shaded pb instead, except where we need to make use of pbs in hdfs; there we reference the transitively included 2.5 pb (see AsyncWAL*). There is also some undoing of direct pb references, where we turn the pb parse exception com.google.protobuf.InvalidProtocolBufferException into a plain IOException (a sketch of the pattern precedes the TableState.java diff below); on review this seems harmless and lessens some of our direct pb mentions.

Removed some checked-in example classes and re-added them under hbase-protocol, where they are now generated. Fixes findbugs warnings in hbase-rsgroup, hbase-rest, etc.

---
dev-support/findbugs-exclude.xml | 8 + .../org/apache/hadoop/hbase/MetaTableAccessor.java | 2 +- .../hadoop/hbase/client/ClientSmallScanner.java | 2 +- .../hbase/client/ConnectionImplementation.java | 6 +- .../hadoop/hbase/client/FlushRegionCallable.java | 2 +- .../org/apache/hadoop/hbase/client/HBaseAdmin.java | 4 +- .../org/apache/hadoop/hbase/client/HTable.java | 8 +- .../hadoop/hbase/client/MetricsConnection.java | 4 +- .../hadoop/hbase/client/MultiServerCallable.java | 2 +- .../hbase/client/RegionCoprocessorServiceExec.java | 4 +- .../hadoop/hbase/client/RpcRetryingCallerImpl.java | 2 +- .../client/RpcRetryingCallerWithReadReplicas.java | 2 +- .../hadoop/hbase/client/ScannerCallable.java | 4 +- .../hadoop/hbase/client/SecureBulkLoadClient.java | 2 +- .../java/org/apache/hadoop/hbase/client/Table.java | 8 +- .../org/apache/hadoop/hbase/client/TableState.java | 5 +- .../client/coprocessor/AggregationClient.java | 4 +- .../client/replication/ReplicationSerDeHelper.java | 2 +- .../hbase/coprocessor/ColumnInterpreter.java | 2 +- .../hadoop/hbase/filter/BinaryComparator.java | 7 +- .../hbase/filter/BinaryPrefixComparator.java | 5 +- .../apache/hadoop/hbase/filter/BitComparator.java | 5 +- .../hadoop/hbase/filter/ColumnCountGetFilter.java | 3 +- .../hbase/filter/ColumnPaginationFilter.java | 3 +- .../hadoop/hbase/filter/ColumnPrefixFilter.java | 3 +- .../hadoop/hbase/filter/ColumnRangeFilter.java | 3 +- .../hadoop/hbase/filter/DependentColumnFilter.java | 3 +- .../apache/hadoop/hbase/filter/FamilyFilter.java | 4 +- .../org/apache/hadoop/hbase/filter/FilterList.java | 4 +- .../apache/hadoop/hbase/filter/FilterWrapper.java | 4 +- .../hadoop/hbase/filter/FirstKeyOnlyFilter.java | 3 +- .../FirstKeyValueMatchingQualifiersFilter.java | 4 +- .../apache/hadoop/hbase/filter/FuzzyRowFilter.java | 4 +- .../hadoop/hbase/filter/InclusiveStopFilter.java | 4 +- .../apache/hadoop/hbase/filter/KeyOnlyFilter.java | 3 +- .../apache/hadoop/hbase/filter/LongComparator.java | 5 +- .../hadoop/hbase/filter/MultiRowRangeFilter.java | 4 +- .../hbase/filter/MultipleColumnPrefixFilter.java | 4 +- .../apache/hadoop/hbase/filter/NullComparator.java | 5 +- .../org/apache/hadoop/hbase/filter/PageFilter.java | 3 +- .../apache/hadoop/hbase/filter/PrefixFilter.java | 4 +- .../hadoop/hbase/filter/QualifierFilter.java | 4 +- .../hadoop/hbase/filter/RandomRowFilter.java | 5 +- .../hadoop/hbase/filter/RegexStringComparator.java | 5 +- .../org/apache/hadoop/hbase/filter/RowFilter.java | 4 +- .../filter/SingleColumnValueExcludeFilter.java | 4 +-
.../hbase/filter/SingleColumnValueFilter.java | 3 +- .../org/apache/hadoop/hbase/filter/SkipFilter.java | 4 +- .../hadoop/hbase/filter/SubstringComparator.java | 5 +- .../hadoop/hbase/filter/TimestampsFilter.java | 3 +- .../apache/hadoop/hbase/filter/ValueFilter.java | 4 +- .../hadoop/hbase/filter/WhileMatchFilter.java | 4 +- .../apache/hadoop/hbase/ipc/AbstractRpcClient.java | 10 +- .../org/apache/hadoop/hbase/ipc/AsyncCall.java | 4 +- .../apache/hadoop/hbase/ipc/AsyncRpcChannel.java | 4 +- .../hadoop/hbase/ipc/AsyncRpcChannelImpl.java | 4 +- .../apache/hadoop/hbase/ipc/AsyncRpcClient.java | 10 +- .../hbase/ipc/AsyncServerResponseHandler.java | 2 +- .../hadoop/hbase/ipc/BlockingRpcCallback.java | 2 +- .../java/org/apache/hadoop/hbase/ipc/Call.java | 6 +- .../hadoop/hbase/ipc/CoprocessorRpcChannel.java | 6 +- .../hadoop/hbase/ipc/CoprocessorRpcUtils.java | 10 +- .../java/org/apache/hadoop/hbase/ipc/IPCUtil.java | 4 +- .../hbase/ipc/MasterCoprocessorRpcChannel.java | 6 +- .../apache/hadoop/hbase/ipc/MessageConverter.java | 4 +- .../hbase/ipc/RegionCoprocessorRpcChannel.java | 6 +- .../ipc/RegionServerCoprocessorRpcChannel.java | 6 +- .../org/apache/hadoop/hbase/ipc/RpcClient.java | 6 +- .../org/apache/hadoop/hbase/ipc/RpcClientImpl.java | 12 +- .../hadoop/hbase/ipc/ServerRpcController.java | 4 +- .../hbase/ipc/SyncCoprocessorRpcChannel.java | 10 +- .../hadoop/hbase/ipc/TimeLimitedRpcController.java | 4 +- .../apache/hadoop/hbase/protobuf/ProtobufUtil.java | 31 +- .../hadoop/hbase/protobuf/RequestConverter.java | 2 +- .../hadoop/hbase/protobuf/ResponseConverter.java | 4 +- .../token/AuthenticationTokenIdentifier.java | 2 +- .../security/visibility/VisibilityClient.java | 2 +- .../hbase/zookeeper/MasterAddressTracker.java | 4 +- .../hadoop/hbase/zookeeper/MetaTableLocator.java | 3 +- .../org/apache/hadoop/hbase/zookeeper/ZKUtil.java | 2 +- .../hadoop/hbase/client/TestClientNoCluster.java | 6 +- .../hadoop/hbase/client/TestMetricsConnection.java | 2 +- .../hadoop/hbase/client/TestSnapshotFromAdmin.java | 2 +- .../hbase/exceptions/TestClientExceptionsUtil.java | 3 +- .../java/org/apache/hadoop/hbase/ServerName.java | 4 +- .../hbase/exceptions/DeserializationException.java | 5 +- .../java/org/apache/hadoop/hbase/types/PBType.java | 6 +- .../java/org/apache/hadoop/hbase/util/Bytes.java | 2 +- .../coprocessor/example/BulkDeleteEndpoint.java | 6 +- .../coprocessor/example/RowCountEndpoint.java | 8 +- .../example/generated/BulkDeleteProtos.java | 1792 -------------------- .../example/generated/ExampleProtos.java | 1149 ------------- .../java/org/apache/hadoop/hbase/types/PBCell.java | 4 +- .../hadoop/hbase/ipc/IntegrationTestRpcClient.java | 10 +- .../apache/hadoop/hbase/procedure2/Procedure.java | 2 +- .../hbase/procedure2/RemoteProcedureException.java | 2 +- .../procedure2/store/wal/ProcedureWALFormat.java | 2 +- .../store/wal/ProcedureWALFormatReader.java | 2 +- hbase-protocol/pom.xml | 52 + .../example/generated/BulkDeleteProtos.java | 1792 ++++++++++++++++++++ .../example/generated/ExampleProtos.java | 1149 +++++++++++++ hbase-protocol/src/main/protobuf/BulkDelete.proto | 7 +- .../apache/hadoop/hbase/rest/MultiRowResource.java | 2 +- .../hadoop/hbase/rest/client/RemoteHTable.java | 8 +- .../hadoop/hbase/rest/model/ScannerModel.java | 2 +- .../hadoop/hbase/rsgroup/RSGroupAdminClient.java | 2 +- .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java | 10 +- .../hadoop/hbase/rsgroup/RSGroupAdminServer.java | 4 +- .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 11 +- 
.../hbase/rsgroup/RSGroupInfoManagerImpl.java | 35 +- .../apache/hadoop/hbase/client/HTableWrapper.java | 8 +- .../client/coprocessor/RowProcessorClient.java | 2 +- .../hbase/coprocessor/AggregateImplementation.java | 10 +- .../coprocessor/BaseRowProcessorEndpoint.java | 10 +- .../hbase/coprocessor/CoprocessorService.java | 2 +- .../hadoop/hbase/coprocessor/EndpointObserver.java | 4 +- .../coprocessor/MultiRowMutationEndpoint.java | 6 +- .../coprocessor/SingletonCoprocessorService.java | 2 +- .../hbase/errorhandling/ForeignException.java | 2 +- .../io/asyncfs/FanOutOneBlockAsyncDFSOutput.java | 5 +- .../FanOutOneBlockAsyncDFSOutputSaslHelper.java | 56 +- .../org/apache/hadoop/hbase/ipc/CallRunner.java | 2 +- .../apache/hadoop/hbase/ipc/PriorityFunction.java | 2 +- .../hadoop/hbase/ipc/RWQueueRpcExecutor.java | 2 +- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 14 +- .../hadoop/hbase/ipc/RpcServerInterface.java | 8 +- .../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 10 +- .../hbase/master/ExpiredMobFileCleanerChore.java | 2 +- .../org/apache/hadoop/hbase/master/HMaster.java | 4 +- .../MasterAnnotationReadingPriorityFunction.java | 2 +- .../hadoop/hbase/master/MasterRpcServices.java | 14 +- .../apache/hadoop/hbase/master/MasterServices.java | 4 +- .../apache/hadoop/hbase/master/ServerManager.java | 4 +- .../balancer/FavoredNodeAssignmentHelper.java | 2 +- .../master/normalizer/SimpleRegionNormalizer.java | 2 +- .../hadoop/hbase/mob/ExpiredMobFileCleaner.java | 2 +- .../apache/hadoop/hbase/mob/mapreduce/Sweeper.java | 2 +- .../hbase/monitoring/MonitoredRPCHandler.java | 2 +- .../hbase/monitoring/MonitoredRPCHandlerImpl.java | 2 +- .../procedure/ZKProcedureCoordinatorRpcs.java | 2 +- .../hbase/procedure/ZKProcedureMemberRpcs.java | 2 +- .../hbase/protobuf/ReplicationProtbufUtil.java | 2 +- .../AnnotationReadingPriorityFunction.java | 4 +- .../hbase/regionserver/BaseRowProcessor.java | 2 +- .../apache/hadoop/hbase/regionserver/HRegion.java | 14 +- .../hadoop/hbase/regionserver/HRegionServer.java | 14 +- .../hadoop/hbase/regionserver/RSRpcServices.java | 10 +- .../apache/hadoop/hbase/regionserver/Region.java | 14 +- .../hbase/regionserver/RegionCoprocessorHost.java | 4 +- .../hbase/regionserver/RegionServerServices.java | 2 +- .../hadoop/hbase/regionserver/RowProcessor.java | 2 +- .../snapshot/RegionServerSnapshotManager.java | 2 +- .../hbase/regionserver/wal/ProtobufLogReader.java | 4 +- .../hbase/regionserver/wal/WALCellCodec.java | 2 +- .../hbase/regionserver/wal/WALEditsReplaySink.java | 2 +- .../hadoop/hbase/regionserver/wal/WALUtil.java | 2 +- .../RegionReplicaReplicationEndpoint.java | 2 +- .../hbase/security/access/AccessController.java | 8 +- .../access/HbaseObjectWritableFor96Migration.java | 4 +- .../security/access/SecureBulkLoadEndpoint.java | 6 +- .../hadoop/hbase/security/token/TokenProvider.java | 6 +- .../hadoop/hbase/security/token/TokenUtil.java | 2 +- .../security/visibility/VisibilityController.java | 8 +- .../hadoop/hbase/snapshot/SnapshotManifest.java | 4 +- .../hadoop/hbase/snapshot/SnapshotManifestV2.java | 2 +- .../org/apache/hadoop/hbase/util/HBaseFsck.java | 2 +- .../org/apache/hadoop/hbase/util/ProtoUtil.java | 4 +- .../java/org/apache/hadoop/hbase/wal/WALKey.java | 2 +- .../org/apache/hadoop/hbase/wal/WALSplitter.java | 4 +- .../main/resources/hbase-webapps/master/table.jsp | 2 +- .../hadoop/hbase/MockRegionServerServices.java | 2 +- .../org/apache/hadoop/hbase/QosTestHelper.java | 2 +- .../hbase/TestMetaTableAccessorNoCluster.java | 4 +- 
.../apache/hadoop/hbase/TestMetaTableLocator.java | 4 +- .../org/apache/hadoop/hbase/TestServerLoad.java | 2 +- .../org/apache/hadoop/hbase/client/TestAdmin1.java | 2 +- .../org/apache/hadoop/hbase/client/TestAdmin2.java | 2 +- .../hbase/client/TestClientScannerRPCTimeout.java | 4 +- .../hadoop/hbase/client/TestClientTimeouts.java | 10 +- .../hbase/client/TestHBaseAdminNoCluster.java | 4 +- .../apache/hadoop/hbase/client/TestMetaCache.java | 4 +- .../coprocessor/ColumnAggregationEndpoint.java | 6 +- .../ColumnAggregationEndpointNullResponse.java | 6 +- .../ColumnAggregationEndpointWithErrors.java | 6 +- .../coprocessor/ProtobufCoprocessorService.java | 6 +- .../coprocessor/TestBatchCoprocessorEndpoint.java | 2 +- .../hbase/coprocessor/TestCoprocessorEndpoint.java | 4 +- .../coprocessor/TestCoprocessorTableEndpoint.java | 2 +- .../TestRegionServerCoprocessorEndpoint.java | 6 +- .../coprocessor/TestRowProcessorEndpoint.java | 2 +- .../TestForeignExceptionSerialization.java | 2 +- .../hadoop/hbase/http/TestSpnegoHttpServer.java | 6 +- .../apache/hadoop/hbase/ipc/AbstractTestIPC.java | 30 +- .../org/apache/hadoop/hbase/ipc/TestAsyncIPC.java | 4 +- .../hadoop/hbase/ipc/TestCoprocessorRpcUtils.java | 2 +- .../java/org/apache/hadoop/hbase/ipc/TestIPC.java | 4 +- .../apache/hadoop/hbase/ipc/TestProtoBufRpc.java | 8 +- .../hadoop/hbase/ipc/TestRpcHandlerException.java | 10 +- .../hadoop/hbase/ipc/TestSimpleRpcScheduler.java | 2 +- .../TestLoadIncrementalHFilesSplitRecovery.java | 4 +- .../hbase/master/MockNoopMasterServices.java | 2 +- .../hadoop/hbase/master/MockRegionServer.java | 6 +- .../hadoop/hbase/master/TestCatalogJanitor.java | 5 +- .../hbase/master/TestHMasterRPCException.java | 4 +- .../hadoop/hbase/master/TestMasterNoCluster.java | 2 +- .../master/cleaner/TestSnapshotFromMaster.java | 2 +- .../normalizer/TestSimpleRegionNormalizer.java | 4 +- .../hadoop/hbase/protobuf/TestProtobufUtil.java | 2 +- .../hbase/regionserver/OOMERegionServer.java | 2 +- .../hadoop/hbase/regionserver/RegionAsTable.java | 8 +- .../regionserver/TestEndToEndSplitTransaction.java | 2 +- .../hadoop/hbase/regionserver/TestHRegion.java | 2 +- .../hadoop/hbase/regionserver/TestHRegionInfo.java | 2 +- .../regionserver/TestHRegionReplayEvents.java | 2 +- .../hadoop/hbase/regionserver/TestPriorityRpc.java | 2 +- .../hbase/regionserver/TestRSStatusServlet.java | 4 +- .../TestRegionMergeTransactionOnCluster.java | 4 +- .../hbase/regionserver/TestRegionReplicas.java | 2 +- .../regionserver/TestRegionServerNoMaster.java | 2 +- .../regionserver/TestScannerHeartbeatMessages.java | 4 +- .../regionserver/TestServerCustomProtocol.java | 8 +- .../TestSplitTransactionOnCluster.java | 4 +- .../hbase/replication/TestMasterReplication.java | 4 - .../hbase/security/AbstractTestSecureIPC.java | 8 +- .../hbase/security/access/SecureTestUtil.java | 4 +- .../security/access/TestAccessController.java | 10 +- .../security/access/TestNamespaceCommands.java | 2 +- .../token/TestGenerateDelegationToken.java | 2 +- .../security/token/TestTokenAuthentication.java | 8 +- .../security/visibility/TestVisibilityLabels.java | 2 +- ...tVisibilityLabelsOpWithDifferentUsersNoACL.java | 2 +- .../visibility/TestVisibilityLabelsWithACL.java | 2 +- ...VisibilityLabelsWithDefaultVisLabelService.java | 2 +- .../visibility/TestVisibilityLablesWithGroups.java | 2 +- .../visibility/TestWithDisabledAuthorization.java | 2 +- .../hbase/snapshot/SnapshotTestingUtils.java | 2 +- .../hbase/snapshot/TestSnapshotManifest.java | 2 +- 
.../apache/hadoop/hbase/wal/TestWALFiltering.java | 2 +- .../hadoop/hbase/spark/SparkSQLPushDownFilter.java | 4 +- 239 files changed, 3548 insertions(+), 3522 deletions(-) delete mode 100644 hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java delete mode 100644 hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java create mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java create mode 100644 hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java diff --git a/dev-support/findbugs-exclude.xml b/dev-support/findbugs-exclude.xml index 4d17582..5cf9089 100644 --- a/dev-support/findbugs-exclude.xml +++ b/dev-support/findbugs-exclude.xml @@ -37,6 +37,14 @@ + + + + + + + + diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index 3d40c70..9f775f9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -35,7 +35,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java index f9bdd55..6c5ca00 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index bb5c996..f100630 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.client; import static org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java index 73bdb74..e2d5daf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRespons import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * A Callable for flushRegion() RPC. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 29650ef..3a9ed03 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index fbd9f51..077d145 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -19,10 +19,10 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java index 4fa20e6..8757408 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import com.codahale.metrics.Counter; import com.codahale.metrics.Histogram; import com.codahale.metrics.MetricRegistry; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java index e764ceb..9eef6d7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Callable that handles the multi method call going against a single diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java index ad1d2a1..829a4ba 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java @@ -23,8 +23,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Objects; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java index cc2f159..e940143 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.ipc.RemoteException; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Runs an rpc'ing {@link RetryingCallable}. 
Sets into rpc client diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java index 65dbb10..ce078ab 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 72d69ec..7910e5a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -52,8 +52,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; /** * Scanner operations such as create, next, etc. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java index 7b1547d..c7481d6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.security.SecureBulkLoadUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.token.Token; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Client proxy for SecureBulkLoadProtocol diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java index f2cec97..9f5dede 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java @@ -32,10 +32,10 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Used to communicate with a single HBase table. 
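The diffs that follow (TableState and the filter/comparator classes) all make the InvalidProtocolBufferException-to-IOException change described in the commit message. The swap is safe because com.google.protobuf.InvalidProtocolBufferException extends java.io.IOException, so the broader catch still traps protobuf parse failures while keeping the pb exception type out of our imports. Below is a minimal sketch of the pattern, modeled on the comparator parseFrom sites in this patch; the ParseSketch class name is illustrative only, not part of the patch.

import java.io.IOException;

import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;

public class ParseSketch {
  /**
   * Parse a serialized BinaryComparator, converting any protobuf parse
   * failure into our own DeserializationException.
   */
  public static byte[] parseComparableValue(byte[] pbBytes) throws DeserializationException {
    ComparatorProtos.BinaryComparator proto;
    try {
      // parseFrom declares InvalidProtocolBufferException, an IOException subtype.
      proto = ComparatorProtos.BinaryComparator.parseFrom(pbBytes);
    } catch (IOException e) {
      // Catching plain IOException still handles the pb parse failure without
      // naming com.google.protobuf.InvalidProtocolBufferException directly.
      throw new DeserializationException(e);
    }
    return proto.getComparable().getValue().toByteArray();
  }
}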
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java index 5d4ac8e..e5b1624 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java @@ -17,7 +17,8 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.InvalidProtocolBufferException; +import java.io.IOException; + import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -170,7 +171,7 @@ public class TableState { throws DeserializationException { try { return convert(tableName, HBaseProtos.TableState.parseFrom(bytes)); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java index 594a459..16697a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java @@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateServi import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * This client class is for invoking the aggregate functions deployed on the diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java index 9682f89..36793e9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.client.replication; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java index e22bd24..52072c2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * Defines how value for specific column is interpreted and provides utility diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java index 3cbb7b9..2bbcd3d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -28,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A binary comparator which lexicographically compares against the specified * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}. @@ -77,7 +76,7 @@ public class BinaryComparator extends ByteArrayComparable { ComparatorProtos.BinaryComparator proto; try { proto = ComparatorProtos.BinaryComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new BinaryComparator(proto.getComparable().getValue().toByteArray()); @@ -94,4 +93,4 @@ public class BinaryComparator extends ByteArrayComparable { return super.areSerializedFieldsEqual(other); } -} +} \ No newline at end of file diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java index a26edbc..d33113d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -28,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A comparator which compares against a specified byte array, but only compares * up to the length of this byte array. 
For the rest it is similar to @@ -82,7 +81,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable { ComparatorProtos.BinaryPrefixComparator proto; try { proto = ComparatorProtos.BinaryPrefixComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new BinaryPrefixComparator(proto.getComparable().getValue().toByteArray()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java index db51df7..2ecf4af 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -26,8 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A bit comparator which performs the specified bitwise operation on each of the bytes * with the specified byte array. Then returns whether the result is non-zero. @@ -90,7 +89,7 @@ public class BitComparator extends ByteArrayComparable { ComparatorProtos.BitComparator proto; try { proto = ComparatorProtos.BitComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } BitwiseOp bitwiseOp = BitwiseOp.valueOf(proto.getBitwiseOp().name()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index fd65130..6cc2927 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Simple filter that returns first N columns on row only. 
@@ -102,7 +101,7 @@ public class ColumnCountGetFilter extends FilterBase { FilterProtos.ColumnCountGetFilter proto; try { proto = FilterProtos.ColumnCountGetFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new ColumnCountGetFilter(proto.getLimit()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index e5ec412..1d6aa50 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter, based on the ColumnCountGetFilter, takes two arguments: limit and offset. @@ -188,7 +187,7 @@ public class ColumnPaginationFilter extends FilterBase { FilterProtos.ColumnPaginationFilter proto; try { proto = FilterProtos.ColumnPaginationFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } if (proto.hasColumnOffset()) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index ff6e8e2..32c2a5e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * This filter is used for selecting only those keys with columns that matches @@ -127,7 +126,7 @@ public class ColumnPrefixFilter extends FilterBase { FilterProtos.ColumnPrefixFilter proto; try { proto = FilterProtos.ColumnPrefixFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new ColumnPrefixFilter(proto.getPrefix().toByteArray()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index 04682c5..f0aa531 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * This filter is used for selecting only those keys with columns that are @@ -193,7 +192,7 @@ public class ColumnRangeFilter extends FilterBase { FilterProtos.ColumnRangeFilter proto; try { proto = FilterProtos.ColumnRangeFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new ColumnRangeFilter(proto.hasMinColumn()?proto.getMinColumn().toByteArray():null, diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index f7c6f26..6ad252d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter for adding inter-column timestamp matching @@ -241,7 +240,7 @@ public class DependentColumnFilter extends CompareFilter { FilterProtos.DependentColumnFilter proto; try { proto = FilterProtos.DependentColumnFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index b3f9a1a..34ee584 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** *

* This filter is used to filter based on the column family. It takes an @@ -99,7 +97,7 @@ public class FamilyFilter extends CompareFilter { FilterProtos.FamilyFilter proto; try { proto = FilterProtos.FamilyFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index da7a084..28b1ef5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -32,8 +32,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * Implementation of {@link Filter} that represents an ordered List of Filters * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} @@ -399,7 +397,7 @@ final public class FilterList extends Filter { FilterProtos.FilterList proto; try { proto = FilterProtos.FilterList.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java index 4d7a18a..afce5c7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java @@ -28,8 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This is a Filter wrapper class which is used in the server side. Some filter * related hooks can be defined in this wrapper. The only way to create a @@ -70,7 +68,7 @@ final public class FilterWrapper extends Filter { FilterProtos.FilterWrapper proto; try { proto = FilterProtos.FilterWrapper.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } try { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index 80a1deb..8f2880a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter that will only return the first KV from each row. @@ -101,7 +100,7 @@ public class FirstKeyOnlyFilter extends FilterBase { // There is nothing to deserialize. Why do this at all? 
try { FilterProtos.FirstKeyOnlyFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } // Just return a new instance. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index 2e9510f..b3ad4e1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * The filter looks for the given columns in KeyValue. Once there is a match for diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 54402ef..6e4a5ed 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.util.UnsafeAccess; import org.apache.hadoop.hbase.util.UnsafeAvailChecker; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.InvalidProtocolBufferException; /** * This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key. @@ -274,7 +274,7 @@ public class FuzzyRowFilter extends FilterBase { FilterProtos.FuzzyRowFilter proto; try { proto = FilterProtos.FuzzyRowFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } int count = proto.getFuzzyKeysDataCount(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 1096f5e..733489c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; @@ -31,7 +32,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A Filter that stops after the given row. 
There is no "RowStopFilter" because @@ -98,7 +98,7 @@ public class InclusiveStopFilter extends FilterBase { FilterProtos.InclusiveStopFilter proto; try { proto = FilterProtos.InclusiveStopFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new InclusiveStopFilter(proto.hasStopRowKey()?proto.getStopRowKey().toByteArray():null); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index 2fd5aba..6aec5bb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter that will only return the key component of each KV (the value will @@ -112,7 +111,7 @@ public class KeyOnlyFilter extends FilterBase { FilterProtos.KeyOnlyFilter proto; try { proto = FilterProtos.KeyOnlyFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new KeyOnlyFilter(proto.getLenAsVal()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java index 9c56772..c91fccc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; -import com.google.protobuf.InvalidProtocolBufferException; - import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -76,7 +75,7 @@ public class LongComparator extends ByteArrayComparable { ComparatorProtos.LongComparator proto; try { proto = ComparatorProtos.LongComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new LongComparator(Bytes.toLong(proto.getComparable().getValue().toByteArray())); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index 5f9c833..e5e8194 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * Filter to support scan multiple row key ranges. It can construct the row key ranges from the * passed list which can be accessed by each region server. 
@@ -172,7 +170,7 @@ public class MultiRowRangeFilter extends FilterBase { FilterProtos.MultiRowRangeFilter proto; try { proto = FilterProtos.MultiRowRangeFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } int length = proto.getRowRangeListCount(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index 0c14649..15be3ea 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -31,8 +31,6 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used for selecting only those keys with columns that matches * a particular prefix. For example, if prefix is 'an', it will pass keys will @@ -134,7 +132,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { FilterProtos.MultipleColumnPrefixFilter proto; try { proto = FilterProtos.MultipleColumnPrefixFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } int numPrefixes = proto.getSortedPrefixesCount(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java index 160232f..b82cf9b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -26,8 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A binary comparator which lexicographically compares against the specified * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}. @@ -86,7 +85,7 @@ public class NullComparator extends ByteArrayComparable { try { // Just parse. Don't use what we parse since on end we are returning new NullComparator. 
ComparatorProtos.NullComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new NullComparator(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index adc9c54..4a95dd7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Implementation of Filter interface that limits results to a specific page * size. It terminates scanning once the number of filter-passed rows is > @@ -112,7 +111,7 @@ public class PageFilter extends FilterBase { FilterProtos.PageFilter proto; try { proto = FilterProtos.PageFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new PageFilter(proto.getPageSize()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index d09ea2c..595bfb3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.ByteBufferedCell; @@ -32,7 +33,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Pass results that have same row prefix. @@ -122,7 +122,7 @@ public class PrefixFilter extends FilterBase { FilterProtos.PrefixFilter proto; try { proto = FilterProtos.PrefixFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new PrefixFilter(proto.hasPrefix()?proto.getPrefix().toByteArray():null); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index 3aa3558..d137efa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used to filter based on the column qualifier. 
It takes an * operator (equal, greater, not equal, etc) and a byte [] comparator for the @@ -98,7 +96,7 @@ public class QualifierFilter extends CompareFilter { FilterProtos.QualifierFilter proto; try { proto = FilterProtos.QualifierFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index decdc78..176015b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.Random; import org.apache.hadoop.hbase.Cell; @@ -27,8 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A filter that includes rows based on a chance. * @@ -129,7 +128,7 @@ public class RandomRowFilter extends FilterBase { FilterProtos.RandomRowFilter proto; try { proto = FilterProtos.RandomRowFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new RandomRowFilter(proto.getChance()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java index 70dd1f9..0b0b9b5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.util.Arrays; @@ -38,8 +39,6 @@ import org.joni.Option; import org.joni.Regex; import org.joni.Syntax; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This comparator is for use with {@link CompareFilter} implementations, such * as {@link RowFilter}, {@link QualifierFilter}, and {@link ValueFilter}, for @@ -171,7 +170,7 @@ public class RegexStringComparator extends ByteArrayComparable { ComparatorProtos.RegexStringComparator proto; try { proto = ComparatorProtos.RegexStringComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } RegexStringComparator comparator; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 559eff8..9754a7d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used to filter based on the key. 
It takes an operator * (equal, greater, not equal, etc) and a byte [] comparator for the row, @@ -115,7 +113,7 @@ public class RowFilter extends CompareFilter { FilterProtos.RowFilter proto; try { proto = FilterProtos.RowFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index d030fd2..d712e1cf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A {@link Filter} that checks a single column value, but does not emit the * tested column. This will enable a performance boost over @@ -149,7 +147,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { FilterProtos.SingleColumnValueExcludeFilter proto; try { proto = FilterProtos.SingleColumnValueExcludeFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index df4e482..5d42e8d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp} @@ -337,7 +336,7 @@ public class SingleColumnValueFilter extends FilterBase { FilterProtos.SingleColumnValueFilter proto; try { proto = FilterProtos.SingleColumnValueFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index 3aced13..a60411b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -28,8 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A wrapper filter that filters an entire row if any of the Cell checks do * not pass. 
@@ -122,7 +120,7 @@ public class SkipFilter extends FilterBase { FilterProtos.SkipFilter proto; try { proto = FilterProtos.SkipFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } try { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java index d36b158..ef8b0ef 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.Locale; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -25,8 +26,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This comparator is for use with SingleColumnValueFilter, for filtering based on @@ -90,7 +89,7 @@ public class SubstringComparator extends ByteArrayComparable { ComparatorProtos.SubstringComparator proto; try { proto = ComparatorProtos.SubstringComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new SubstringComparator(proto.getSubstr()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index f0e5afe..03c39f0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Filter that returns only cells whose timestamp (version) is @@ -187,7 +186,7 @@ public class TimestampsFilter extends FilterBase { FilterProtos.TimestampsFilter proto; try { proto = FilterProtos.TimestampsFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new TimestampsFilter(proto.getTimestampsList(), diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 2f679f0..ba2cc53 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used to filter based on column value. 
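[Annotation, not part of the patch.] For wrapper filters such as the SkipFilter hunk above, the same widening applies twice: once for the outer proto parse and once for rebuilding the wrapped filter, since ProtobufUtil.toFilter already declares IOException. A sketch of that two-step shape, abbreviated from the patched parseFrom (the method wrapper is illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.exceptions.DeserializationException;
    import org.apache.hadoop.hbase.filter.SkipFilter;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;

    public final class WrapperParseSketch {
      static SkipFilter parse(byte[] pbBytes) throws DeserializationException {
        FilterProtos.SkipFilter proto;
        try {
          proto = FilterProtos.SkipFilter.parseFrom(pbBytes); // outer proto
        } catch (IOException e) {
          throw new DeserializationException(e);
        }
        try {
          // Rebuild the wrapped filter; toFilter throws IOException itself.
          return new SkipFilter(ProtobufUtil.toFilter(proto.getFilter()));
        } catch (IOException ioe) {
          throw new DeserializationException(ioe);
        }
      }
    }
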
It takes an * operator (equal, greater, not equal, etc) and a byte [] comparator for the @@ -95,7 +93,7 @@ public class ValueFilter extends CompareFilter { FilterProtos.ValueFilter proto; try { proto = FilterProtos.ValueFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index e75ca49..18a2625 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -28,8 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A wrapper filter that returns true from {@link #filterAllRemaining()} as soon * as the wrapped filters {@link Filter#filterRowKey(byte[], int, int)}, @@ -124,7 +122,7 @@ public class WhileMatchFilter extends FilterBase { FilterProtos.WhileMatchFilter proto; try { proto = FilterProtos.WhileMatchFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } try { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java index 3d3339a..72a652a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.net.ConnectException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java index 89e6ca4..0310baa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java index 8cc730f..66b95a1 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import io.netty.util.concurrent.EventExecutor; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java index 6b7dc5b..9a51127 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import io.netty.bootstrap.Bootstrap; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufOutputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java index 723a234..f4f4713 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelInitializer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java index 6fcca34..9dbc928 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufInputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java index 0475e58..efae13a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.RpcCallback; +import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java index 73bc0e2..65e96d0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; import java.io.IOException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.MetricsConnection; @@ -128,4 +128,4 @@ public class Call { public long getStartTime() { return this.callStats.getStartTime(); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java index 45c3700..a8de603 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -31,4 +31,4 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public @InterfaceStability.Evolving public interface CoprocessorRpcChannel extends RpcChannel, BlockingRpcChannel { -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java index 63ff3e8..d8c861d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.ByteStringer; @@ -36,7 +36,8 @@ public final class CoprocessorRpcUtils { */ private static final String hbaseServicePackage; static { - Descriptors.ServiceDescriptor clientService = ClientProtos.ClientService.getDescriptor(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor clientService = + ClientProtos.ClientService.getDescriptor(); hbaseServicePackage = clientService.getFullName() .substring(0, clientService.getFullName().lastIndexOf(clientService.getName())); } @@ -62,7 +63,8 @@ public final class CoprocessorRpcUtils { * Returns a service call instance for the given 
coprocessor request. */ public static ClientProtos.CoprocessorServiceCall buildServiceCall(byte[] row, - Descriptors.MethodDescriptor method, Message request) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, + org.apache.hadoop.hbase.shaded.com.google.protobuf.Message request) { return ClientProtos.CoprocessorServiceCall.newBuilder() .setRow(ByteStringer.wrap(row)) .setServiceName(CoprocessorRpcUtils.getServiceName(method.getService())) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 74f934c..181a7ff 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.base.Preconditions; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import java.io.IOException; import java.io.OutputStream; import java.nio.BufferOverflowException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java index 6fae5cb..c191968 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java index a85225a..301d2df 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import java.io.IOException; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -38,7 +38,7 @@ public interface MessageConverter<M, O> { */ O convert(M msg, CellScanner cellScanner) throws IOException; - MessageConverter<Message, Message> NO_CONVERTER = new MessageConverter<Message, Message>() { + MessageConverter<Message, Message> NO_CONVERTER = new MessageConverter<Message, Message>() { @Override public Message convert(Message msg, CellScanner cellScanner) throws IOException { return msg; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java index 55d6375..61b558f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.ipc; -import
com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java index c23d36c..74a328e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java @@ -11,9 +11,9 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java index 9d05c21..df469e0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel; import io.netty.util.concurrent.EventExecutor; import java.io.Closeable; import java.io.IOException; @@ -122,4 +122,4 @@ import org.apache.hadoop.hbase.security.User; * @return EventLoop */ EventExecutor getEventExecutor(); -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java index dc05af1..76b185d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.Message.Builder; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.Builder; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import io.netty.util.concurrent.EventExecutor; import java.io.BufferedInputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java index 
b899eb8..7f635a1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java index 347d8a1..2952329 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java index cf08ea9..5b97b7b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index b3bf041..c3c0d60 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -17,6 +17,9 @@ */ package org.apache.hadoop.hbase.protobuf; +import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier +.RegionSpecifierType.REGION_NAME; + import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; @@ -42,10 +45,6 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; - -import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier -.RegionSpecifierType.REGION_NAME; - import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; @@ -124,8 +123,8 @@ import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos; import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo; import 
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition; import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionLoad; -import org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; +import org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair; @@ -171,27 +170,25 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.DynamicClassLoader; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Methods; -import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.io.Text; import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.net.HostAndPort; -import com.google.protobuf.ByteString; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.Message; -import com.google.protobuf.Parser; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; /** * Protobufs utility. 
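[Annotation, not part of the patch.] Since ProtobufUtil above now imports every protobuf type from the shaded package, it is worth making the consequence explicit: relocation produces a second, completely independent copy of the runtime, so a shaded Message and a stock com.google.protobuf.Message are unrelated types that merely share a simple name. A compile-time illustration (class and method names are hypothetical):

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

    public final class ShadingSketch {
      static void illustrate(Message shaded, com.google.protobuf.Message stock) {
        // Neither assignment below would compile: the two hierarchies are
        // unrelated, which is why code that must hand protos to libraries
        // built against stock protobuf has to stay on the unshaded classes.
        // stock = shaded;
        // shaded = stock;
      }
    }
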
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index b75d2b8..75d8570 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -116,7 +116,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.token.Token; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; /** * Helper utility to build protocol buffer requests, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java index 421907d..37f589d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java @@ -59,8 +59,8 @@ import org.apache.hadoop.hbase.regionserver.RegionOpeningState; import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.util.StringUtils; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; /** * Helper utility to build protocol buffer responses, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java index 64e1a39..577b7d1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java @@ -29,7 +29,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.TokenIdentifier; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; /** * Represents the identity information stored in an HBase authentication token. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java index 7527049..4f91e92 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Utility client for doing visibility labels admin operations. 
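[Annotation, not part of the patch.] The ServiceException swap in VisibilityClient follows from the earlier CoprocessorRpcChannel change: generated services are now compiled against the shaded runtime, so their blocking stubs accept only the shaded channel and throw the shaded ServiceException. A hedged usage sketch with the RowCountService example endpoint (the enclosing class is illustrative, and a null RpcController is passed for brevity):

    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
    import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

    public final class StubSketch {
      static long rowCount(Table table, byte[] row) throws ServiceException {
        // The channel implements the shaded BlockingRpcChannel, matching the
        // shaded newBlockingStub factory on the generated service.
        CoprocessorRpcChannel channel = table.coprocessorService(row);
        ExampleProtos.RowCountService.BlockingInterface stub =
            ExampleProtos.RowCountService.newBlockingStub(channel);
        return stub.getRowCount(null, ExampleProtos.CountRequest.getDefaultInstance()).getCount();
      }
    }
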
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java index 6f4859a..9e4b9c4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java @@ -31,8 +31,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; -import com.google.protobuf.InvalidProtocolBufferException; - /** * Manages the location of the current active Master for the RegionServer. *

@@ -249,7 +247,7 @@ public class MasterAddressTracker extends ZooKeeperNodeTracker { int prefixLen = ProtobufUtil.lengthOfPBMagic(); try { return ZooKeeperProtos.Master.PARSER.parseFrom(data, prefixLen, data.length - prefixLen); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java index 497e8c4..b4c4cdb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.zookeeper; -import com.google.protobuf.InvalidProtocolBufferException; import java.io.EOFException; import java.io.IOException; @@ -493,7 +492,7 @@ public class MetaTableLocator { HBaseProtos.ServerName sn = rl.getServer(); serverName = ServerName.valueOf( sn.getHostName(), sn.getPort(), sn.getStartCode()); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException("Unable to parse meta region location"); } } else { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java index f5b720e..4dd26ad 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java @@ -77,7 +77,7 @@ import org.apache.zookeeper.proto.DeleteRequest; import org.apache.zookeeper.proto.SetDataRequest; import org.apache.zookeeper.server.ZooKeeperSaslServer; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * Internal HBase utility class for ZooKeeper. diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index 1ece448..cb597e4 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -90,9 +90,9 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.common.base.Stopwatch; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Test client behavior w/o setting up a cluster. 
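[Annotation, not part of the patch.] One more pattern worth isolating, from the MasterAddressTracker hunk above: znode payloads carry a short protobuf-magic prefix, so parsing starts past it, and the widened IOException catch covers the parse failure exactly as in the filter classes. A compact sketch mirroring the patched method (the class wrapper is illustrative):

    import java.io.IOException;
    import org.apache.hadoop.hbase.exceptions.DeserializationException;
    import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;

    public final class ZnodeParseSketch {
      static ZooKeeperProtos.Master parseMaster(byte[] data) throws DeserializationException {
        // Skip the magic header HBase writes ahead of protobuf payloads in ZK.
        int prefixLen = ProtobufUtil.lengthOfPBMagic();
        try {
          return ZooKeeperProtos.Master.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
        } catch (IOException e) {
          throw new DeserializationException(e);
        }
      }
    }
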
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java index 5191880..ee04b0c 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java index b41c859..d13f604 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java @@ -40,7 +40,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; /** * Test snapshot logic from the client diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java index 968e55c..c9692c9 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java @@ -18,14 +18,13 @@ */ package org.apache.hadoop.hbase.exceptions; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.junit.Test; import java.io.IOException; import static org.junit.Assert.*; -@SuppressWarnings("ThrowableInstanceNeverThrown") public class TestClientExceptionsUtil { @Test diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java index 12804df..234addd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hbase; import com.google.common.net.HostAndPort; import com.google.common.net.InetAddresses; -import com.google.protobuf.InvalidProtocolBufferException; +import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.List; @@ -389,7 +389,7 @@ import org.apache.hadoop.hbase.util.Bytes; ZooKeeperProtos.Master.PARSER.parseFrom(data, prefixLen, data.length - prefixLen); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getMaster(); return valueOf(sn.getHostName(), sn.getPort(), sn.getStartCode()); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { // A failed parse of the znode is pretty catastrophic. 
Rather than loop // retrying hoping the bad bytes will changes, and rather than change // the signature on this method to add an IOE which will send ripples all diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java index 0ce0219..179eac7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java @@ -21,6 +21,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; /** * Failed deserialization. + * Usually wraps a protobuf InvalidProtocolBufferException or some similar parse-time + * failure; it converts whatever the underlying parser throws into a generic + * 'deserialization' exception. */ @InterfaceAudience.Private @SuppressWarnings("serial") @@ -40,4 +43,4 @@ public class DeserializationException extends HBaseException { public DeserializationException(final Throwable t) { super(t); } -} +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java index 3d545f6..c325e85 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java @@ -22,9 +22,9 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * A base-class for {@link DataType} implementations backed by protobuf.
See diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 41292a5..f90988c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -35,7 +35,7 @@ import java.util.Comparator; import java.util.Iterator; import java.util.List; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java index c9ab23c..1faff35 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java @@ -51,9 +51,9 @@ import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Defines a protocol to delete data in bulk based on a scan. The scan can be range scan or with diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java index 4309cdc..ba09dbc 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java @@ -36,16 +36,16 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Sample coprocessor endpoint exposing a Service interface for counting rows and key values. * *

* For the protocol buffer definition of the RowCountService, see the source file located under - * hbase-server/src/main/protobuf/Examples.proto. + * hbase-protocol/src/main/protobuf/Examples.proto. *

*/ public class RowCountEndpoint extends ExampleProtos.RowCountService diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java deleted file mode 100644 index 373e036..0000000 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java +++ /dev/null @@ -1,1792 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: BulkDelete.proto - -package org.apache.hadoop.hbase.coprocessor.example.generated; - -public final class BulkDeleteProtos { - private BulkDeleteProtos() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface BulkDeleteRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .hbase.pb.Scan scan = 1; - /** - * required .hbase.pb.Scan scan = 1; - */ - boolean hasScan(); - /** - * required .hbase.pb.Scan scan = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); - /** - * required .hbase.pb.Scan scan = 1; - */ - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); - - // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - boolean hasDeleteType(); - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType(); - - // optional uint64 timestamp = 3; - /** - * optional uint64 timestamp = 3; - */ - boolean hasTimestamp(); - /** - * optional uint64 timestamp = 3; - */ - long getTimestamp(); - - // required uint32 rowBatchSize = 4; - /** - * required uint32 rowBatchSize = 4; - */ - boolean hasRowBatchSize(); - /** - * required uint32 rowBatchSize = 4; - */ - int getRowBatchSize(); - } - /** - * Protobuf type {@code hbase.pb.BulkDeleteRequest} - */ - public static final class BulkDeleteRequest extends - com.google.protobuf.GeneratedMessage - implements BulkDeleteRequestOrBuilder { - // Use BulkDeleteRequest.newBuilder() to construct. 
- private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final BulkDeleteRequest defaultInstance; - public static BulkDeleteRequest getDefaultInstance() { - return defaultInstance; - } - - public BulkDeleteRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private BulkDeleteRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = scan_.toBuilder(); - } - scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(scan_); - scan_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - deleteType_ = value; - } - break; - } - case 24: { - bitField0_ |= 0x00000004; - timestamp_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - rowBatchSize_ = input.readUInt32(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() 
{ - public BulkDeleteRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BulkDeleteRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - /** - * Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType} - */ - public enum DeleteType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * ROW = 0; - */ - ROW(0, 0), - /** - * FAMILY = 1; - */ - FAMILY(1, 1), - /** - * COLUMN = 2; - */ - COLUMN(2, 2), - /** - * VERSION = 3; - */ - VERSION(3, 3), - ; - - /** - * ROW = 0; - */ - public static final int ROW_VALUE = 0; - /** - * FAMILY = 1; - */ - public static final int FAMILY_VALUE = 1; - /** - * COLUMN = 2; - */ - public static final int COLUMN_VALUE = 2; - /** - * VERSION = 3; - */ - public static final int VERSION_VALUE = 3; - - - public final int getNumber() { return value; } - - public static DeleteType valueOf(int value) { - switch (value) { - case 0: return ROW; - case 1: return FAMILY; - case 2: return COLUMN; - case 3: return VERSION; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public DeleteType findValueByNumber(int number) { - return DeleteType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0); - } - - private static final DeleteType[] VALUES = values(); - - public static DeleteType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private DeleteType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:hbase.pb.BulkDeleteRequest.DeleteType) - } - - private int bitField0_; - // required .hbase.pb.Scan scan = 1; - public static final int SCAN_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; - /** - * required .hbase.pb.Scan scan = 1; - */ - public boolean hasScan() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - return scan_; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - return scan_; - } - - // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - public static final int DELETETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType 
deleteType_; - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public boolean hasDeleteType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() { - return deleteType_; - } - - // optional uint64 timestamp = 3; - public static final int TIMESTAMP_FIELD_NUMBER = 3; - private long timestamp_; - /** - * optional uint64 timestamp = 3; - */ - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional uint64 timestamp = 3; - */ - public long getTimestamp() { - return timestamp_; - } - - // required uint32 rowBatchSize = 4; - public static final int ROWBATCHSIZE_FIELD_NUMBER = 4; - private int rowBatchSize_; - /** - * required uint32 rowBatchSize = 4; - */ - public boolean hasRowBatchSize() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * required uint32 rowBatchSize = 4; - */ - public int getRowBatchSize() { - return rowBatchSize_; - } - - private void initFields() { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - timestamp_ = 0L; - rowBatchSize_ = 0; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasScan()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasDeleteType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasRowBatchSize()) { - memoizedIsInitialized = 0; - return false; - } - if (!getScan().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, scan_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, deleteType_.getNumber()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt32(4, rowBatchSize_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, scan_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, deleteType_.getNumber()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, timestamp_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, rowBatchSize_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } 
- - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj; - - boolean result = true; - result = result && (hasScan() == other.hasScan()); - if (hasScan()) { - result = result && getScan() - .equals(other.getScan()); - } - result = result && (hasDeleteType() == other.hasDeleteType()); - if (hasDeleteType()) { - result = result && - (getDeleteType() == other.getDeleteType()); - } - result = result && (hasTimestamp() == other.hasTimestamp()); - if (hasTimestamp()) { - result = result && (getTimestamp() - == other.getTimestamp()); - } - result = result && (hasRowBatchSize() == other.hasRowBatchSize()); - if (hasRowBatchSize()) { - result = result && (getRowBatchSize() - == other.getRowBatchSize()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasScan()) { - hash = (37 * hash) + SCAN_FIELD_NUMBER; - hash = (53 * hash) + getScan().hashCode(); - } - if (hasDeleteType()) { - hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getDeleteType()); - } - if (hasTimestamp()) { - hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getTimestamp()); - } - if (hasRowBatchSize()) { - hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER; - hash = (53 * hash) + getRowBatchSize(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.BulkDeleteRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getScanFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - deleteType_ = 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - bitField0_ = (bitField0_ & ~0x00000002); - timestamp_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - rowBatchSize_ = 0; - bitField0_ = (bitField0_ & ~0x00000008); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest build() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (scanBuilder_ == null) { - result.scan_ = scan_; - } else { - result.scan_ = scanBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.deleteType_ = deleteType_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.timestamp_ = timestamp_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.rowBatchSize_ = rowBatchSize_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) return this; - if (other.hasScan()) { - mergeScan(other.getScan()); - } - if (other.hasDeleteType()) { - setDeleteType(other.getDeleteType()); - } - if (other.hasTimestamp()) { - setTimestamp(other.getTimestamp()); - } - if (other.hasRowBatchSize()) { - setRowBatchSize(other.getRowBatchSize()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasScan()) { - - return false; - } - if (!hasDeleteType()) { - - return false; - } - if (!hasRowBatchSize()) { - - return false; - } - if (!getScan().isInitialized()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required .hbase.pb.Scan scan = 1; - private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; - /** - * required .hbase.pb.Scan scan = 1; - */ - public boolean hasScan() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { - if (scanBuilder_ == null) { - return scan_; - } else { - return scanBuilder_.getMessage(); - } - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - scan_ = value; - onChanged(); - } else { - scanBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder setScan( - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { - if (scanBuilder_ == null) { - scan_ = builderForValue.build(); - onChanged(); - } else { - scanBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { - if (scanBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { - scan_ = - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); - } else { - scan_ = value; - } - onChanged(); - } else { - scanBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public Builder clearScan() { - if (scanBuilder_ == null) { - scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - onChanged(); - } else { - scanBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getScanFieldBuilder().getBuilder(); - } - /** - * required .hbase.pb.Scan scan = 1; - */ - public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { - if (scanBuilder_ != null) { - return scanBuilder_.getMessageOrBuilder(); - 
} else { - return scan_; - } - } - /** - * required .hbase.pb.Scan scan = 1; - */ - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> - getScanFieldBuilder() { - if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( - scan_, - getParentForChildren(), - isClean()); - scan_ = null; - } - return scanBuilder_; - } - - // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public boolean hasDeleteType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() { - return deleteType_; - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - deleteType_ = value; - onChanged(); - return this; - } - /** - * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; - */ - public Builder clearDeleteType() { - bitField0_ = (bitField0_ & ~0x00000002); - deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; - onChanged(); - return this; - } - - // optional uint64 timestamp = 3; - private long timestamp_ ; - /** - * optional uint64 timestamp = 3; - */ - public boolean hasTimestamp() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional uint64 timestamp = 3; - */ - public long getTimestamp() { - return timestamp_; - } - /** - * optional uint64 timestamp = 3; - */ - public Builder setTimestamp(long value) { - bitField0_ |= 0x00000004; - timestamp_ = value; - onChanged(); - return this; - } - /** - * optional uint64 timestamp = 3; - */ - public Builder clearTimestamp() { - bitField0_ = (bitField0_ & ~0x00000004); - timestamp_ = 0L; - onChanged(); - return this; - } - - // required uint32 rowBatchSize = 4; - private int rowBatchSize_ ; - /** - * required uint32 rowBatchSize = 4; - */ - public boolean hasRowBatchSize() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * required uint32 rowBatchSize = 4; - */ - public int getRowBatchSize() { - return rowBatchSize_; - } - /** - * required uint32 rowBatchSize = 4; - */ - public Builder setRowBatchSize(int value) { - bitField0_ |= 0x00000008; - rowBatchSize_ = value; - onChanged(); - return this; - } - /** - * required uint32 rowBatchSize = 4; - */ - public Builder clearRowBatchSize() { - bitField0_ = (bitField0_ & ~0x00000008); - rowBatchSize_ = 0; - onChanged(); - return this; - } - - // 
@@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteRequest)
-    }
-
-    static {
-      defaultInstance = new BulkDeleteRequest(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteRequest)
-  }
-
-  public interface BulkDeleteResponseOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-
-    // required uint64 rowsDeleted = 1;
-    /**
-     * <code>required uint64 rowsDeleted = 1;</code>
-     */
-    boolean hasRowsDeleted();
-    /**
-     * <code>required uint64 rowsDeleted = 1;</code>
-     */
-    long getRowsDeleted();
-
-    // optional uint64 versionsDeleted = 2;
-    /**
-     * <code>optional uint64 versionsDeleted = 2;</code>
-     */
-    boolean hasVersionsDeleted();
-    /**
-     * <code>optional uint64 versionsDeleted = 2;</code>
-     */
-    long getVersionsDeleted();
-  }
-  /**
-   * Protobuf type {@code hbase.pb.BulkDeleteResponse}
-   */
-  public static final class BulkDeleteResponse extends
-      com.google.protobuf.GeneratedMessage
-      implements BulkDeleteResponseOrBuilder {
-    // Use BulkDeleteResponse.newBuilder() to construct.
-    private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
-      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final BulkDeleteResponse defaultInstance;
-    public static BulkDeleteResponse getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public BulkDeleteResponse getDefaultInstanceForType() {
-      return defaultInstance;
-    }
-
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private BulkDeleteResponse(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              rowsDeleted_ = input.readUInt64();
-              break;
-            }
-            case 16: {
-              bitField0_ |= 0x00000002;
-              versionsDeleted_ = input.readUInt64();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
-    public static final com.google.protobuf.Descriptors.Descriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
-    }
-
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
-        internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class,
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public BulkDeleteResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new BulkDeleteResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required uint64 rowsDeleted = 1; - public static final int ROWSDELETED_FIELD_NUMBER = 1; - private long rowsDeleted_; - /** - * required uint64 rowsDeleted = 1; - */ - public boolean hasRowsDeleted() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required uint64 rowsDeleted = 1; - */ - public long getRowsDeleted() { - return rowsDeleted_; - } - - // optional uint64 versionsDeleted = 2; - public static final int VERSIONSDELETED_FIELD_NUMBER = 2; - private long versionsDeleted_; - /** - * optional uint64 versionsDeleted = 2; - */ - public boolean hasVersionsDeleted() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional uint64 versionsDeleted = 2; - */ - public long getVersionsDeleted() { - return versionsDeleted_; - } - - private void initFields() { - rowsDeleted_ = 0L; - versionsDeleted_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRowsDeleted()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, rowsDeleted_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, versionsDeleted_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, rowsDeleted_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, versionsDeleted_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj; - - boolean result = true; - result = result && (hasRowsDeleted() == other.hasRowsDeleted()); - if (hasRowsDeleted()) { - result = result && (getRowsDeleted() - == other.getRowsDeleted()); - } 
- result = result && (hasVersionsDeleted() == other.hasVersionsDeleted()); - if (hasVersionsDeleted()) { - result = result && (getVersionsDeleted() - == other.getVersionsDeleted()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasRowsDeleted()) { - hash = (37 * hash) + ROWSDELETED_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getRowsDeleted()); - } - if (hasVersionsDeleted()) { - hash = (37 * hash) + VERSIONSDELETED_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getVersionsDeleted()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException 
{ - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.BulkDeleteResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - rowsDeleted_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - versionsDeleted_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse build() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse(this); - int 
from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.rowsDeleted_ = rowsDeleted_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.versionsDeleted_ = versionsDeleted_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()) return this; - if (other.hasRowsDeleted()) { - setRowsDeleted(other.getRowsDeleted()); - } - if (other.hasVersionsDeleted()) { - setVersionsDeleted(other.getVersionsDeleted()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRowsDeleted()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required uint64 rowsDeleted = 1; - private long rowsDeleted_ ; - /** - * required uint64 rowsDeleted = 1; - */ - public boolean hasRowsDeleted() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required uint64 rowsDeleted = 1; - */ - public long getRowsDeleted() { - return rowsDeleted_; - } - /** - * required uint64 rowsDeleted = 1; - */ - public Builder setRowsDeleted(long value) { - bitField0_ |= 0x00000001; - rowsDeleted_ = value; - onChanged(); - return this; - } - /** - * required uint64 rowsDeleted = 1; - */ - public Builder clearRowsDeleted() { - bitField0_ = (bitField0_ & ~0x00000001); - rowsDeleted_ = 0L; - onChanged(); - return this; - } - - // optional uint64 versionsDeleted = 2; - private long versionsDeleted_ ; - /** - * optional uint64 versionsDeleted = 2; - */ - public boolean hasVersionsDeleted() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional uint64 versionsDeleted = 2; - */ - public long getVersionsDeleted() { - return versionsDeleted_; - } - /** - * optional uint64 versionsDeleted = 2; - */ - public Builder setVersionsDeleted(long value) { - bitField0_ |= 0x00000002; - versionsDeleted_ = value; - onChanged(); - return this; - } - /** - * optional uint64 versionsDeleted = 2; - */ - public Builder clearVersionsDeleted() { - bitField0_ = (bitField0_ & ~0x00000002); - versionsDeleted_ = 0L; - onChanged(); - return this; - } - - // 
@@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteResponse)
-    }
-
-    static {
-      defaultInstance = new BulkDeleteResponse(true);
-      defaultInstance.initFields();
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteResponse)
-  }
-
-  /**
-   * Protobuf service {@code hbase.pb.BulkDeleteService}
-   */
-  public static abstract class BulkDeleteService
-      implements com.google.protobuf.Service {
-    protected BulkDeleteService() {}
-
-    public interface Interface {
-      /**
-       * <code>rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);</code>
-       */
-      public abstract void delete(
-          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
-
-    }
-
-    public static com.google.protobuf.Service newReflectiveService(
-        final Interface impl) {
-      return new BulkDeleteService() {
-        @java.lang.Override
-        public void delete(
-            com.google.protobuf.RpcController controller,
-            org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
-            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
-          impl.delete(controller, request, done);
-        }
-
-      };
-    }
-
-    public static com.google.protobuf.BlockingService
-        newReflectiveBlockingService(final BlockingInterface impl) {
-      return new com.google.protobuf.BlockingService() {
-        public final com.google.protobuf.Descriptors.ServiceDescriptor
-            getDescriptorForType() {
-          return getDescriptor();
-        }
-
-        public final com.google.protobuf.Message callBlockingMethod(
-            com.google.protobuf.Descriptors.MethodDescriptor method,
-            com.google.protobuf.RpcController controller,
-            com.google.protobuf.Message request)
-            throws com.google.protobuf.ServiceException {
-          if (method.getService() != getDescriptor()) {
-            throw new java.lang.IllegalArgumentException(
-              "Service.callBlockingMethod() given method descriptor for " +
-              "wrong service type.");
-          }
-          switch(method.getIndex()) {
-            case 0:
-              return impl.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request);
-            default:
-              throw new java.lang.AssertionError("Can't get here.");
-          }
-        }
-
-        public final com.google.protobuf.Message
-            getRequestPrototype(
-            com.google.protobuf.Descriptors.MethodDescriptor method) {
-          if (method.getService() != getDescriptor()) {
-            throw new java.lang.IllegalArgumentException(
-              "Service.getRequestPrototype() given method " +
-              "descriptor for wrong service type.");
-          }
-          switch(method.getIndex()) {
-            case 0:
-              return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
-            default:
-              throw new java.lang.AssertionError("Can't get here.");
-          }
-        }
-
-        public final com.google.protobuf.Message
-            getResponsePrototype(
-            com.google.protobuf.Descriptors.MethodDescriptor method) {
-          if (method.getService() != getDescriptor()) {
-            throw new java.lang.IllegalArgumentException(
-              "Service.getResponsePrototype() given method " +
-              "descriptor for wrong service type.");
-          }
-          switch(method.getIndex()) {
-            case 0:
-              return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
-            default:
-              throw new java.lang.AssertionError("Can't get here.");
-          }
-        }
-
-      };
-    }
-
-    /**
-     * <code>rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);</code>
-     */
-    public abstract void delete(
-        com.google.protobuf.RpcController controller,
-        org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
-
-    public static final
-        com.google.protobuf.Descriptors.ServiceDescriptor
-        getDescriptor() {
-      return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.getDescriptor().getServices().get(0);
-    }
-    public final com.google.protobuf.Descriptors.ServiceDescriptor
-        getDescriptorForType() {
-      return getDescriptor();
-    }
-
-    public final void callMethod(
-        com.google.protobuf.Descriptors.MethodDescriptor method,
-        com.google.protobuf.RpcController controller,
-        com.google.protobuf.Message request,
-        com.google.protobuf.RpcCallback<
-          com.google.protobuf.Message> done) {
-      if (method.getService() != getDescriptor()) {
-        throw new java.lang.IllegalArgumentException(
-          "Service.callMethod() given method descriptor for wrong " +
-          "service type.");
-      }
-      switch(method.getIndex()) {
-        case 0:
-          this.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request,
-            com.google.protobuf.RpcUtil.specializeCallback(
-              done));
-          return;
-        default:
-          throw new java.lang.AssertionError("Can't get here.");
-      }
-    }
-
-    public final com.google.protobuf.Message
-        getRequestPrototype(
-        com.google.protobuf.Descriptors.MethodDescriptor method) {
-      if (method.getService() != getDescriptor()) {
-        throw new java.lang.IllegalArgumentException(
-          "Service.getRequestPrototype() given method " +
-          "descriptor for wrong service type.");
-      }
-      switch(method.getIndex()) {
-        case 0:
-          return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
-        default:
-          throw new java.lang.AssertionError("Can't get here.");
-      }
-    }
-
-    public final com.google.protobuf.Message
-        getResponsePrototype(
-        com.google.protobuf.Descriptors.MethodDescriptor method) {
-      if (method.getService() != getDescriptor()) {
-        throw new java.lang.IllegalArgumentException(
-          "Service.getResponsePrototype() given method " +
-          "descriptor for wrong service type.");
-      }
-      switch(method.getIndex()) {
-        case 0:
-          return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
-        default:
-          throw new java.lang.AssertionError("Can't get here.");
-      }
-    }
-
-    public static Stub newStub(
-        com.google.protobuf.RpcChannel channel) {
-      return new Stub(channel);
-    }
-
-    public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface {
-      private Stub(com.google.protobuf.RpcChannel channel) {
-        this.channel = channel;
-      }
-
-      private final com.google.protobuf.RpcChannel channel;
-
-      public com.google.protobuf.RpcChannel getChannel() {
-        return channel;
-      }
-
-      public void delete(
-          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
-        channel.callMethod(
-          getDescriptor().getMethods().get(0),
-          controller,
-          request,
-          org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(),
-          com.google.protobuf.RpcUtil.generalizeCallback(
-            done,
-            org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class,
-            org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()));
-      }
-    }
-
-    public static BlockingInterface newBlockingStub(
-        com.google.protobuf.BlockingRpcChannel channel) {
-      return new BlockingStub(channel);
-    }
-
-    public interface BlockingInterface {
-      public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
-          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
-          throws com.google.protobuf.ServiceException;
-    }
-
-    private static final class BlockingStub implements BlockingInterface {
-      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
-        this.channel = channel;
-      }
-
-      private final com.google.protobuf.BlockingRpcChannel channel;
-
-      public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
-          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
-          throws com.google.protobuf.ServiceException {
-        return (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(0),
-          controller,
-          request,
-          org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance());
-      }
-
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_hbase_pb_BulkDeleteRequest_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_hbase_pb_BulkDeleteResponse_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\020BulkDelete.proto\022\010hbase.pb\032\014Client.pro" +
-      "to\"\322\001\n\021BulkDeleteRequest\022\034\n\004scan\030\001 \002(\0132\016" +
-      ".hbase.pb.Scan\022:\n\ndeleteType\030\002 \002(\0162&.hba" +
-      "se.pb.BulkDeleteRequest.DeleteType\022\021\n\tti" +
-      "mestamp\030\003 \001(\004\022\024\n\014rowBatchSize\030\004 \002(\r\":\n\nD" +
-      "eleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY\020\001\022\n\n\006COLUMN" +
-      "\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDeleteResponse\022\023\n" +
-      "\013rowsDeleted\030\001 \002(\004\022\027\n\017versionsDeleted\030\002 " +
-      "\001(\0042X\n\021BulkDeleteService\022C\n\006delete\022\033.hba" +
-      "se.pb.BulkDeleteRequest\032\034.hbase.pb.BulkD",
-      "eleteResponseBQ\n5org.apache.hadoop.hbase" +
-      ".coprocessor.example.generatedB\020BulkDele" +
-      "teProtosH\001\210\001\001\240\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-        public com.google.protobuf.ExtensionRegistry assignDescriptors(
-            com.google.protobuf.Descriptors.FileDescriptor root) {
-          descriptor = root;
-          internal_static_hbase_pb_BulkDeleteRequest_descriptor =
-            getDescriptor().getMessageTypes().get(0);
-          internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_hbase_pb_BulkDeleteRequest_descriptor,
-              new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", });
-          internal_static_hbase_pb_BulkDeleteResponse_descriptor =
-            getDescriptor().getMessageTypes().get(1);
-          internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable = new
-            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-              internal_static_hbase_pb_BulkDeleteResponse_descriptor,
-              new java.lang.String[] { "RowsDeleted", "VersionsDeleted", });
-          return null;
-        }
-      };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
deleted file mode 100644
index b780985..0000000
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java
+++ /dev/null
@@ -1,1149 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: Examples.proto
-
-package org.apache.hadoop.hbase.coprocessor.example.generated;
-
-public final class ExampleProtos {
-  private ExampleProtos() {}
-  public static void registerAllExtensions(
-      com.google.protobuf.ExtensionRegistry registry) {
-  }
-  public interface CountRequestOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
-  }
-  /**
-   * Protobuf type {@code hbase.pb.CountRequest}
-   */
-  public static final class CountRequest extends
-      com.google.protobuf.GeneratedMessage
-      implements CountRequestOrBuilder {
-    // Use CountRequest.newBuilder() to construct.
- private CountRequest(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CountRequest defaultInstance; - public static CountRequest getDefaultInstance() { - return defaultInstance; - } - - public CountRequest getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CountRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CountRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CountRequest(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private void initFields() { - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - 
@java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) obj; - - boolean result = true; - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.CountRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest build() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest(this); - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof 
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance()) return this; - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.CountRequest) - } - - static { - defaultInstance = new CountRequest(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.CountRequest) - } - - public interface CountResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required int64 count = 1 [default = 0]; - /** - * required int64 count = 1 [default = 0]; - */ - boolean hasCount(); - /** - * required int64 count = 1 [default = 0]; - */ - long getCount(); - } - /** - * Protobuf type {@code hbase.pb.CountResponse} - */ - public static final class CountResponse extends - com.google.protobuf.GeneratedMessage - implements CountResponseOrBuilder { - // Use CountResponse.newBuilder() to construct. 
- private CountResponse(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final CountResponse defaultInstance; - public static CountResponse getDefaultInstance() { - return defaultInstance; - } - - public CountResponse getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CountResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - count_ = input.readInt64(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public CountResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CountResponse(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required int64 count = 1 [default = 0]; - public static final int COUNT_FIELD_NUMBER = 1; - private long count_; - /** - * required int64 count = 1 [default = 0]; - */ - public boolean hasCount() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 count = 1 [default = 0]; - */ - public long getCount() { - return count_; - } - - private void initFields() { - count_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasCount()) { 
- memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeInt64(1, count_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(1, count_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) obj; - - boolean result = true; - result = result && (hasCount() == other.hasCount()); - if (hasCount()) { - result = result && (getCount() - == other.getCount()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - private int memoizedHashCode = 0; - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasCount()) { - hash = (37 * hash) + COUNT_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getCount()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - java.io.InputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code hbase.pb.CountResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); - } - - // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - count_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse build() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildPartial() { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.count_ = count_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) { - return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other) { - if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()) return this; - if (other.hasCount()) { - setCount(other.getCount()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasCount()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required int64 count = 1 [default = 0]; - private long count_ ; - /** - * required int64 count = 1 [default = 0]; - */ - public boolean hasCount() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required int64 count = 1 [default = 0]; - */ - public long getCount() { - return count_; - } - /** - * required int64 count = 1 [default = 0]; - */ - public Builder setCount(long value) { - bitField0_ |= 0x00000001; - count_ = value; - onChanged(); - return this; - } - /** - * required int64 count = 1 [default = 0]; - */ - public Builder clearCount() { - bitField0_ = (bitField0_ & ~0x00000001); - count_ = 0L; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:hbase.pb.CountResponse) - } - - static { - defaultInstance = new CountResponse(true); - 
defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:hbase.pb.CountResponse) - } - - /** - * Protobuf service {@code hbase.pb.RowCountService} - */ - public static abstract class RowCountService - implements com.google.protobuf.Service { - protected RowCountService() {} - - public interface Interface { - /** - * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - } - - public static com.google.protobuf.Service newReflectiveService( - final Interface impl) { - return new RowCountService() { - @java.lang.Override - public void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - impl.getRowCount(controller, request, done); - } - - @java.lang.Override - public void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - impl.getKeyValueCount(controller, request, done); - } - - }; - } - - public static com.google.protobuf.BlockingService - newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callBlockingMethod() given method descriptor for " + - "wrong service type."); - } - switch(method.getIndex()) { - case 0: - return impl.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request); - case 1: - return impl.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - 
com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - }; - } - - /** - * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - /** - * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); - */ - public abstract void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); - - public static final - com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.getDescriptor().getServices().get(0); - } - public final com.google.protobuf.Descriptors.ServiceDescriptor - getDescriptorForType() { - return getDescriptor(); - } - - public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.callMethod() given method descriptor for wrong " + - "service type."); - } - switch(method.getIndex()) { - case 0: - this.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - case 1: - this.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( - done)); - return; - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getRequestPrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public final com.google.protobuf.Message - getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { - if (method.getService() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "Service.getResponsePrototype() given method " + - "descriptor for wrong service type."); - } - switch(method.getIndex()) { - 
case 0: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - case 1: - return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); - default: - throw new java.lang.AssertionError("Can't get here."); - } - } - - public static Stub newStub( - com.google.protobuf.RpcChannel channel) { - return new Stub(channel); - } - - public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.RowCountService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.RpcChannel channel; - - public com.google.protobuf.RpcChannel getChannel() { - return channel; - } - - public void getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(0), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); - } - - public void getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { - channel.callMethod( - getDescriptor().getMethods().get(1), - controller, - request, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( - done, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); - } - } - - public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { - return new BlockingStub(channel); - } - - public interface BlockingInterface { - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException; - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException; - } - - private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { - this.channel = channel; - } - - private final com.google.protobuf.BlockingRpcChannel channel; - - public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( - com.google.protobuf.RpcController controller, - org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException { - return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) 
channel.callBlockingMethod(
-          getDescriptor().getMethods().get(0),
-          controller,
-          request,
-          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance());
-      }
-
-
-      public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount(
-          com.google.protobuf.RpcController controller,
-          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request)
-          throws com.google.protobuf.ServiceException {
-        return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod(
-          getDescriptor().getMethods().get(1),
-          controller,
-          request,
-          org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance());
-      }
-
-    }
-
-    // @@protoc_insertion_point(class_scope:hbase.pb.RowCountService)
-  }
-
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_hbase_pb_CountRequest_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_hbase_pb_CountRequest_fieldAccessorTable;
-  private static com.google.protobuf.Descriptors.Descriptor
-    internal_static_hbase_pb_CountResponse_descriptor;
-  private static
-    com.google.protobuf.GeneratedMessage.FieldAccessorTable
-      internal_static_hbase_pb_CountResponse_fieldAccessorTable;
-
-  public static com.google.protobuf.Descriptors.FileDescriptor
-      getDescriptor() {
-    return descriptor;
-  }
-  private static com.google.protobuf.Descriptors.FileDescriptor
-      descriptor;
-  static {
-    java.lang.String[] descriptorData = {
-      "\n\016Examples.proto\022\010hbase.pb\"\016\n\014CountReque" +
-      "st\"!\n\rCountResponse\022\020\n\005count\030\001 \002(\003:\00102\226\001" +
-      "\n\017RowCountService\022>\n\013getRowCount\022\026.hbase" +
-      ".pb.CountRequest\032\027.hbase.pb.CountRespons" +
-      "e\022C\n\020getKeyValueCount\022\026.hbase.pb.CountRe" +
-      "quest\032\027.hbase.pb.CountResponseBN\n5org.ap" +
-      "ache.hadoop.hbase.coprocessor.example.ge" +
-      "neratedB\rExampleProtosH\001\210\001\001\240\001\001"
-    };
-    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
-        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
-          public com.google.protobuf.ExtensionRegistry assignDescriptors(
-              com.google.protobuf.Descriptors.FileDescriptor root) {
-            descriptor = root;
-            internal_static_hbase_pb_CountRequest_descriptor =
-              getDescriptor().getMessageTypes().get(0);
-            internal_static_hbase_pb_CountRequest_fieldAccessorTable = new
-              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-                internal_static_hbase_pb_CountRequest_descriptor,
-                new java.lang.String[] { });
-            internal_static_hbase_pb_CountResponse_descriptor =
-              getDescriptor().getMessageTypes().get(1);
-            internal_static_hbase_pb_CountResponse_fieldAccessorTable = new
-              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
-                internal_static_hbase_pb_CountResponse_descriptor,
-                new java.lang.String[] { "Count", });
-            return null;
-          }
-        };
-    com.google.protobuf.Descriptors.FileDescriptor
-      .internalBuildGeneratedFileFrom(descriptorData,
-        new com.google.protobuf.Descriptors.FileDescriptor[] {
-        }, assigner);
-  }
-
-  // @@protoc_insertion_point(outer_class_scope)
-}
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
index c063aa9..26aa887 100644
--- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
+++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.types;
 
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.CodedOutputStream;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream;
 import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import org.apache.hadoop.hbase.util.PositionedByteRange;
 
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
index 0a7ac8a..48d276c 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java
@@ -56,11 +56,11 @@
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import com.google.common.collect.Lists;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
 
 @Category(IntegrationTests.class)
 public class IntegrationTestRpcClient {
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index b401871..ab900e7 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -42,7 +42,7 @@
 import org.apache.hadoop.hbase.util.NonceKey;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
 
 /**
  * Base Procedure class responsible to handle the Procedure Metadata
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java
index 71aae84..0aa5a90 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage;
 import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * A RemoteProcedureException is an exception from another thread or process.
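[Illustration, not part of the patch] The relocation-only hunks above all follow the same pattern. As a rough sketch of what calling code looks like once it is compiled against the shaded hbase-protocol jar (assuming the shade plugin configured below has rewritten the protobuf references), note that the relocated InvalidProtocolBufferException still extends java.io.IOException, so a parse failure can be handled as a plain IOE without touching any protobuf type:

    import java.io.IOException;
    import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;

    public class ShadedParseSketch {
      // Hypothetical helper, not part of this patch: parses a CountResponse
      // using the shaded runtime and surfaces failures as a plain IOException.
      static long countFrom(byte[] bytes) throws IOException {
        try {
          return CountResponse.parseFrom(bytes).getCount();
        } catch (InvalidProtocolBufferException e) {
          // The relocated exception still extends java.io.IOException,
          // so callers need no direct protobuf reference at all.
          throw new IOException("Failed to parse CountResponse", e);
        }
      }
    }
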
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
index 02b2db9..36fdaa9 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hbase.procedure2.store.wal;
 
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 import java.io.IOException;
 import java.io.InputStream;
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
index 8678c86..c87fb37 100644
--- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
+++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hbase.procedure2.store.wal;
 
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 
 import java.io.IOException;
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index b7846ca..f515207 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -37,6 +37,52 @@
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.4.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <relocations>
+                <relocation>
+                  <pattern>com.google.protobuf</pattern>
+                  <shadedPattern>org.apache.hadoop.hbase.shaded.com.google.protobuf</shadedPattern>
+                </relocation>
+              </relocations>
+              <artifactSet>
+                <excludes>
+                  <exclude>commons-logging:commons-logging</exclude>
+                  <exclude>com.github.stephenc.findbugs:findbugs-annotations</exclude>
+                  <exclude>log4j:log4j</exclude>
+                  <exclude>org.hamcrest:hamcrest-core</exclude>
+                  <exclude>org.mockito:mockito-all</exclude>
+                  <exclude>junit:junit</exclude>
+                  <exclude>org.apache.hbase:hbase-annotations</exclude>
+                </excludes>
+              </artifactSet>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
@@ -106,6 +152,9 @@
+      org.apache.hbase
@@ -121,6 +170,7 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
+      <version>2.5.0</version>
     </dependency>
     <dependency>
       <groupId>commons-logging</groupId>
@@ -173,12 +223,14 @@
                           Admin.proto
                           Aggregate.proto
                           Authentication.proto
+                          BulkDelete.proto
                           Cell.proto
                           Client.proto
                           ClusterId.proto
                           ClusterStatus.proto
                           Comparator.proto
                           Encryption.proto
+                          Examples.proto
                           ErrorHandling.proto
                           FS.proto
                           Filter.proto
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
new file mode 100644
index 0000000..373e036
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
@@ -0,0 +1,1792 @@
+// Generated by the protocol buffer compiler.  DO NOT EDIT!
+// source: BulkDelete.proto + +package org.apache.hadoop.hbase.coprocessor.example.generated; + +public final class BulkDeleteProtos { + private BulkDeleteProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface BulkDeleteRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .hbase.pb.Scan scan = 1; + /** + * required .hbase.pb.Scan scan = 1; + */ + boolean hasScan(); + /** + * required .hbase.pb.Scan scan = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); + /** + * required .hbase.pb.Scan scan = 1; + */ + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); + + // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + boolean hasDeleteType(); + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType(); + + // optional uint64 timestamp = 3; + /** + * optional uint64 timestamp = 3; + */ + boolean hasTimestamp(); + /** + * optional uint64 timestamp = 3; + */ + long getTimestamp(); + + // required uint32 rowBatchSize = 4; + /** + * required uint32 rowBatchSize = 4; + */ + boolean hasRowBatchSize(); + /** + * required uint32 rowBatchSize = 4; + */ + int getRowBatchSize(); + } + /** + * Protobuf type {@code hbase.pb.BulkDeleteRequest} + */ + public static final class BulkDeleteRequest extends + com.google.protobuf.GeneratedMessage + implements BulkDeleteRequestOrBuilder { + // Use BulkDeleteRequest.newBuilder() to construct. + private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final BulkDeleteRequest defaultInstance; + public static BulkDeleteRequest getDefaultInstance() { + return defaultInstance; + } + + public BulkDeleteRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BulkDeleteRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = scan_.toBuilder(); + } + scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scan_); + scan_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + int rawValue = 
input.readEnum(); + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + deleteType_ = value; + } + break; + } + case 24: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + rowBatchSize_ = input.readUInt32(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public BulkDeleteRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkDeleteRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + /** + * Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType} + */ + public enum DeleteType + implements com.google.protobuf.ProtocolMessageEnum { + /** + * ROW = 0; + */ + ROW(0, 0), + /** + * FAMILY = 1; + */ + FAMILY(1, 1), + /** + * COLUMN = 2; + */ + COLUMN(2, 2), + /** + * VERSION = 3; + */ + VERSION(3, 3), + ; + + /** + * ROW = 0; + */ + public static final int ROW_VALUE = 0; + /** + * FAMILY = 1; + */ + public static final int FAMILY_VALUE = 1; + /** + * COLUMN = 2; + */ + public static final int COLUMN_VALUE = 2; + /** + * VERSION = 3; + */ + public static final int VERSION_VALUE = 3; + + + public final int getNumber() { return value; } + + public static DeleteType valueOf(int value) { + switch (value) { + case 0: return ROW; + case 1: return FAMILY; + case 2: return COLUMN; + case 3: return VERSION; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public DeleteType findValueByNumber(int number) { + return DeleteType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + 
getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0); + } + + private static final DeleteType[] VALUES = values(); + + public static DeleteType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private DeleteType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.BulkDeleteRequest.DeleteType) + } + + private int bitField0_; + // required .hbase.pb.Scan scan = 1; + public static final int SCAN_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + /** + * required .hbase.pb.Scan scan = 1; + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + public static final int DELETETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_; + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() { + return deleteType_; + } + + // optional uint64 timestamp = 3; + public static final int TIMESTAMP_FIELD_NUMBER = 3; + private long timestamp_; + /** + * optional uint64 timestamp = 3; + */ + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional uint64 timestamp = 3; + */ + public long getTimestamp() { + return timestamp_; + } + + // required uint32 rowBatchSize = 4; + public static final int ROWBATCHSIZE_FIELD_NUMBER = 4; + private int rowBatchSize_; + /** + * required uint32 rowBatchSize = 4; + */ + public boolean hasRowBatchSize() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * required uint32 rowBatchSize = 4; + */ + public int getRowBatchSize() { + return rowBatchSize_; + } + + private void initFields() { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; + timestamp_ = 0L; + rowBatchSize_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasScan()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasDeleteType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasRowBatchSize()) { + 
memoizedIsInitialized = 0; + return false; + } + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, scan_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, deleteType_.getNumber()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt32(4, rowBatchSize_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, scan_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, deleteType_.getNumber()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, rowBatchSize_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj; + + boolean result = true; + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasDeleteType() == other.hasDeleteType()); + if (hasDeleteType()) { + result = result && + (getDeleteType() == other.getDeleteType()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasRowBatchSize() == other.hasRowBatchSize()); + if (hasRowBatchSize()) { + result = result && (getRowBatchSize() + == other.getRowBatchSize()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasDeleteType()) { + hash = (37 * hash) + DELETETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getDeleteType()); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if 
(hasRowBatchSize()) { + hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER; + hash = (53 * hash) + getRowBatchSize(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code 
hbase.pb.BulkDeleteRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; + bitField0_ = (bitField0_ & ~0x00000002); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + rowBatchSize_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest build() { + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 
0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.deleteType_ = deleteType_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.rowBatchSize_ = rowBatchSize_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) return this; + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasDeleteType()) { + setDeleteType(other.getDeleteType()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasRowBatchSize()) { + setRowBatchSize(other.getRowBatchSize()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasScan()) { + + return false; + } + if (!hasDeleteType()) { + + return false; + } + if (!hasRowBatchSize()) { + + return false; + } + if (!getScan().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .hbase.pb.Scan scan = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + /** + * required .hbase.pb.Scan scan = 1; + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; 
+ return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + /** + * required .hbase.pb.Scan scan = 1; + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + /** + * required .hbase.pb.Scan scan = 1; + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + public boolean hasDeleteType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() { + return deleteType_; + } + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + deleteType_ = value; + 
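+        // Generated setter bookkeeping: the presence bit set above is what
+        // hasDeleteType() reports, and the onChanged() call below notifies any
+        // parent builder that cached state is stale.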
onChanged(); + return this; + } + /** + * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2; + */ + public Builder clearDeleteType() { + bitField0_ = (bitField0_ & ~0x00000002); + deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW; + onChanged(); + return this; + } + + // optional uint64 timestamp = 3; + private long timestamp_ ; + /** + * optional uint64 timestamp = 3; + */ + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * optional uint64 timestamp = 3; + */ + public long getTimestamp() { + return timestamp_; + } + /** + * optional uint64 timestamp = 3; + */ + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000004; + timestamp_ = value; + onChanged(); + return this; + } + /** + * optional uint64 timestamp = 3; + */ + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + onChanged(); + return this; + } + + // required uint32 rowBatchSize = 4; + private int rowBatchSize_ ; + /** + * required uint32 rowBatchSize = 4; + */ + public boolean hasRowBatchSize() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * required uint32 rowBatchSize = 4; + */ + public int getRowBatchSize() { + return rowBatchSize_; + } + /** + * required uint32 rowBatchSize = 4; + */ + public Builder setRowBatchSize(int value) { + bitField0_ |= 0x00000008; + rowBatchSize_ = value; + onChanged(); + return this; + } + /** + * required uint32 rowBatchSize = 4; + */ + public Builder clearRowBatchSize() { + bitField0_ = (bitField0_ & ~0x00000008); + rowBatchSize_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteRequest) + } + + static { + defaultInstance = new BulkDeleteRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteRequest) + } + + public interface BulkDeleteResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 rowsDeleted = 1; + /** + * required uint64 rowsDeleted = 1; + */ + boolean hasRowsDeleted(); + /** + * required uint64 rowsDeleted = 1; + */ + long getRowsDeleted(); + + // optional uint64 versionsDeleted = 2; + /** + * optional uint64 versionsDeleted = 2; + */ + boolean hasVersionsDeleted(); + /** + * optional uint64 versionsDeleted = 2; + */ + long getVersionsDeleted(); + } + /** + * Protobuf type {@code hbase.pb.BulkDeleteResponse} + */ + public static final class BulkDeleteResponse extends + com.google.protobuf.GeneratedMessage + implements BulkDeleteResponseOrBuilder { + // Use BulkDeleteResponse.newBuilder() to construct. 
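+    // Usage sketch (illustrative only; "channel" is an assumed
+    // com.google.protobuf.BlockingRpcChannel -- in HBase it would typically be the
+    // CoprocessorRpcChannel obtained from Table#coprocessorService(row)):
+    //
+    //   BulkDeleteRequest request = BulkDeleteRequest.newBuilder()
+    //       .setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance())
+    //       .setDeleteType(BulkDeleteRequest.DeleteType.ROW)
+    //       .setRowBatchSize(100)   // required field; build() fails if it is unset
+    //       .build();
+    //   BulkDeleteService.BlockingInterface stub = BulkDeleteService.newBlockingStub(channel);
+    //   long rowsDeleted = stub.delete(null, request).getRowsDeleted();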
+ private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final BulkDeleteResponse defaultInstance; + public static BulkDeleteResponse getDefaultInstance() { + return defaultInstance; + } + + public BulkDeleteResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BulkDeleteResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + rowsDeleted_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionsDeleted_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); + } + + public static com.google.protobuf.Parser<BulkDeleteResponse> PARSER = + new com.google.protobuf.AbstractParser<BulkDeleteResponse>() { + public BulkDeleteResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new BulkDeleteResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<BulkDeleteResponse> getParserForType() { + return PARSER; + } + + private int bitField0_; + // required uint64 rowsDeleted = 1; + public static final int ROWSDELETED_FIELD_NUMBER = 1; + private long rowsDeleted_; + /** + * required uint64 rowsDeleted = 1; + */ + public boolean hasRowsDeleted() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required uint64 rowsDeleted = 1; + */ + public long getRowsDeleted() { + return rowsDeleted_; + } + + // optional uint64 versionsDeleted = 2; + public static final int
VERSIONSDELETED_FIELD_NUMBER = 2; + private long versionsDeleted_; + /** + * optional uint64 versionsDeleted = 2; + */ + public boolean hasVersionsDeleted() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint64 versionsDeleted = 2; + */ + public long getVersionsDeleted() { + return versionsDeleted_; + } + + private void initFields() { + rowsDeleted_ = 0L; + versionsDeleted_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRowsDeleted()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, rowsDeleted_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, versionsDeleted_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, rowsDeleted_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, versionsDeleted_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj; + + boolean result = true; + result = result && (hasRowsDeleted() == other.hasRowsDeleted()); + if (hasRowsDeleted()) { + result = result && (getRowsDeleted() + == other.getRowsDeleted()); + } + result = result && (hasVersionsDeleted() == other.hasVersionsDeleted()); + if (hasVersionsDeleted()) { + result = result && (getVersionsDeleted() + == other.getVersionsDeleted()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRowsDeleted()) { + hash = (37 * hash) + ROWSDELETED_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getRowsDeleted()); + } + if (hasVersionsDeleted()) { + hash = (37 * hash) + VERSIONSDELETED_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getVersionsDeleted()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( + com.google.protobuf.ByteString data) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.BulkDeleteResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + rowsDeleted_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + versionsDeleted_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse build() { + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildPartial() { + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.rowsDeleted_ = rowsDeleted_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.versionsDeleted_ = versionsDeleted_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other) { + if (other == 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()) return this; + if (other.hasRowsDeleted()) { + setRowsDeleted(other.getRowsDeleted()); + } + if (other.hasVersionsDeleted()) { + setVersionsDeleted(other.getVersionsDeleted()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRowsDeleted()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required uint64 rowsDeleted = 1; + private long rowsDeleted_ ; + /** + * required uint64 rowsDeleted = 1; + */ + public boolean hasRowsDeleted() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required uint64 rowsDeleted = 1; + */ + public long getRowsDeleted() { + return rowsDeleted_; + } + /** + * required uint64 rowsDeleted = 1; + */ + public Builder setRowsDeleted(long value) { + bitField0_ |= 0x00000001; + rowsDeleted_ = value; + onChanged(); + return this; + } + /** + * required uint64 rowsDeleted = 1; + */ + public Builder clearRowsDeleted() { + bitField0_ = (bitField0_ & ~0x00000001); + rowsDeleted_ = 0L; + onChanged(); + return this; + } + + // optional uint64 versionsDeleted = 2; + private long versionsDeleted_ ; + /** + * optional uint64 versionsDeleted = 2; + */ + public boolean hasVersionsDeleted() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * optional uint64 versionsDeleted = 2; + */ + public long getVersionsDeleted() { + return versionsDeleted_; + } + /** + * optional uint64 versionsDeleted = 2; + */ + public Builder setVersionsDeleted(long value) { + bitField0_ |= 0x00000002; + versionsDeleted_ = value; + onChanged(); + return this; + } + /** + * optional uint64 versionsDeleted = 2; + */ + public Builder clearVersionsDeleted() { + bitField0_ = (bitField0_ & ~0x00000002); + versionsDeleted_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteResponse) + } + + static { + defaultInstance = new BulkDeleteResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteResponse) + } + + /** + * Protobuf service {@code hbase.pb.BulkDeleteService} + */ + public static abstract class BulkDeleteService + implements com.google.protobuf.Service { + protected BulkDeleteService() {} + + public interface Interface { + /** + * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse); + */ + public abstract void delete( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new BulkDeleteService() { + @java.lang.Override +
public void delete( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) { + impl.delete(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse); + */ + public abstract void delete( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.delete(controller,
(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request, + com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse>specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void delete( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request) + throws
com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_BulkDeleteRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_BulkDeleteResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\020BulkDelete.proto\022\010hbase.pb\032\014Client.pro" + + "to\"\322\001\n\021BulkDeleteRequest\022\034\n\004scan\030\001 \002(\0132\016" + + ".hbase.pb.Scan\022:\n\ndeleteType\030\002 \002(\0162&.hba" + + "se.pb.BulkDeleteRequest.DeleteType\022\021\n\tti" + + "mestamp\030\003 \001(\004\022\024\n\014rowBatchSize\030\004 \002(\r\":\n\nD" + + "eleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY\020\001\022\n\n\006COLUMN" + + "\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDeleteResponse\022\023\n" + + "\013rowsDeleted\030\001 \002(\004\022\027\n\017versionsDeleted\030\002 " + + "\001(\0042X\n\021BulkDeleteService\022C\n\006delete\022\033.hba" + + "se.pb.BulkDeleteRequest\032\034.hbase.pb.BulkD", + "eleteResponseBQ\n5org.apache.hadoop.hbase" + + ".coprocessor.example.generatedB\020BulkDele" + + "teProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_hbase_pb_BulkDeleteRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_BulkDeleteRequest_descriptor, + new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", }); + internal_static_hbase_pb_BulkDeleteResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_BulkDeleteResponse_descriptor, + new java.lang.String[] { "RowsDeleted", "VersionsDeleted", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java new file mode 100644 index 0000000..b780985 --- /dev/null +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java @@ -0,0 +1,1149 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Examples.proto + +package org.apache.hadoop.hbase.coprocessor.example.generated; + +public final class ExampleProtos { + private ExampleProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface CountRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + /** + * Protobuf type {@code hbase.pb.CountRequest} + */ + public static final class CountRequest extends + com.google.protobuf.GeneratedMessage + implements CountRequestOrBuilder { + // Use CountRequest.newBuilder() to construct. + private CountRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CountRequest defaultInstance; + public static CountRequest getDefaultInstance() { + return defaultInstance; + } + + public CountRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CountRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); + } + + public static com.google.protobuf.Parser<CountRequest> PARSER = + new com.google.protobuf.AbstractParser<CountRequest>() { + public CountRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException {
+ return new CountRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<CountRequest> getParserForType() { + return PARSER; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.CountRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() { + return
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest build() { + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest buildPartial() { + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest other) { + if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.CountRequest) + } + + static { + defaultInstance = new CountRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.CountRequest) + } + + public interface CountResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required int64 count = 1 [default = 0]; + /** + * required int64 count = 1 [default = 0]; + */ + boolean hasCount(); + /** + * required int64 count = 1 [default = 0]; + */ + long getCount(); + } + /** + * Protobuf type {@code hbase.pb.CountResponse} + */ + public static final class CountResponse extends + com.google.protobuf.GeneratedMessage + implements CountResponseOrBuilder { + // Use CountResponse.newBuilder() to construct. 
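+    // Usage sketch (illustrative; "replyBytes" is an assumed serialized reply from a
+    // counting endpoint such as RowCountEndpoint). CountRequest carries no fields, and
+    // parseFrom rejects a CountResponse whose required "count" field is missing:
+    //
+    //   CountRequest request = CountRequest.newBuilder().build();
+    //   CountResponse response = CountResponse.parseFrom(replyBytes);
+    //   long count = response.getCount();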
+ private CountResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final CountResponse defaultInstance; + public static CountResponse getDefaultInstance() { + return defaultInstance; + } + + public CountResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private CountResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + count_ = input.readInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); + } + + public static com.google.protobuf.Parser<CountResponse> PARSER = + new com.google.protobuf.AbstractParser<CountResponse>() { + public CountResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new CountResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<CountResponse> getParserForType() { + return PARSER; + } + + private int bitField0_; + // required int64 count = 1 [default = 0]; + public static final int COUNT_FIELD_NUMBER = 1; + private long count_; + /** + * required int64 count = 1 [default = 0]; + */ + public boolean hasCount() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int64 count = 1 [default = 0]; + */ + public long getCount() { + return count_; + } + + private void initFields() { + count_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasCount()) {
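+        // A required field is absent: cache the negative result so repeated
+        // isInitialized() calls stay cheap.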
+ memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeInt64(1, count_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeInt64Size(1, count_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) obj; + + boolean result = true; + result = result && (hasCount() == other.hasCount()); + if (hasCount()) { + result = result && (getCount() + == other.getCount()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasCount()) { + hash = (37 * hash) + COUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCount()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.CountResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + count_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse build() { + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse buildPartial() { + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.count_ = count_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) { + return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other) { + if (other == org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()) return this; + if (other.hasCount()) { + setCount(other.getCount()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasCount()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required int64 count = 1 [default = 0]; + private long count_ ; + /** + * required int64 count = 1 [default = 0]; + */ + public boolean hasCount() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required int64 count = 1 [default = 0]; + */ + public long getCount() { + return count_; + } + /** + * required int64 count = 1 [default = 0]; + */ + public Builder setCount(long value) { + bitField0_ |= 0x00000001; + count_ = value; + onChanged(); + return this; + } + /** + * required int64 count = 1 [default = 0]; + */ + public Builder clearCount() { + bitField0_ = (bitField0_ & ~0x00000001); + count_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.CountResponse) + } + + static { + defaultInstance = new CountResponse(true); + 
defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.CountResponse) + } + + /** + * Protobuf service {@code hbase.pb.RowCountService} + */ + public static abstract class RowCountService + implements com.google.protobuf.Service { + protected RowCountService() {} + + public interface Interface { + /** + * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); + */ + public abstract void getRowCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); + */ + public abstract void getKeyValueCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new RowCountService() { + @java.lang.Override + public void getRowCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done) { + impl.getRowCount(controller, request, done); + } + + @java.lang.Override + public void getKeyValueCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done) { + impl.getKeyValueCount(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request); + case 1: + return impl.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + 
com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + /** + * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); + */ + public abstract void getRowCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done); + + /** + * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); + */ + public abstract void getKeyValueCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + 
case 0: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.RowCountService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRowCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); + } + + public void getKeyValueCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) 
channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance()); + } + + } + + // @@protoc_insertion_point(class_scope:hbase.pb.RowCountService) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_CountRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_CountRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hbase_pb_CountResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hbase_pb_CountResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\016Examples.proto\022\010hbase.pb\"\016\n\014CountReque" + + "st\"!\n\rCountResponse\022\020\n\005count\030\001 \002(\003:\00102\226\001" + + "\n\017RowCountService\022>\n\013getRowCount\022\026.hbase" + + ".pb.CountRequest\032\027.hbase.pb.CountRespons" + + "e\022C\n\020getKeyValueCount\022\026.hbase.pb.CountRe" + + "quest\032\027.hbase.pb.CountResponseBN\n5org.ap" + + "ache.hadoop.hbase.coprocessor.example.ge" + + "neratedB\rExampleProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_hbase_pb_CountRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hbase_pb_CountRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_CountRequest_descriptor, + new java.lang.String[] { }); + internal_static_hbase_pb_CountResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_hbase_pb_CountResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hbase_pb_CountResponse_descriptor, + new java.lang.String[] { "Count", }); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol/src/main/protobuf/BulkDelete.proto b/hbase-protocol/src/main/protobuf/BulkDelete.proto index c2ec8ca..16f5c30 100644 --- a/hbase-protocol/src/main/protobuf/BulkDelete.proto +++ b/hbase-protocol/src/main/protobuf/BulkDelete.proto @@ 
-15,7 +15,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package hbase.pb; option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated"; @@ -31,7 +30,7 @@ message BulkDeleteRequest { required DeleteType deleteType = 2; optional uint64 timestamp = 3; required uint32 rowBatchSize = 4; - + enum DeleteType { ROW = 0; FAMILY = 1; @@ -41,11 +40,11 @@ message BulkDeleteRequest { } message BulkDeleteResponse { - required uint64 rowsDeleted = 1; + required uint64 rowsDeleted = 1; optional uint64 versionsDeleted = 2; } service BulkDeleteService { rpc delete(BulkDeleteRequest) returns (BulkDeleteResponse); -} \ No newline at end of file +} diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java index 9cafe27..b952c00 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java @@ -115,7 +115,7 @@ public class MultiRowResource extends ResourceBase implements Constants { servlet.getMetrics().incrementSucessfulGetRequests(1); return Response.ok(model).build(); } - } catch (Exception e) { + } catch (IOException e) { servlet.getMetrics().incrementFailedGetRequests(1); return processException(e); } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index b9e393e..0935368 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -67,10 +67,10 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.StringUtils; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * HTable interface to remote tables accessed via REST gateway diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 7512d3e..cff9736 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import com.sun.jersey.api.json.JSONConfiguration; import com.sun.jersey.api.json.JSONJAXBContext; import com.sun.jersey.api.json.JSONMarshaller; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java index a7f14f7..64071b2 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java +++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.rsgroup; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java index 7172f06..1e355e0 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java @@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.rsgroup; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import java.io.IOException; import java.util.HashSet; @@ -79,7 +79,8 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RSGroupAdmi import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupRequest; import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupResponse; - +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD", + justification="FIX!!! TODO!!!") public class RSGroupAdminEndpoint extends RSGroupAdminService implements CoprocessorService, Coprocessor, MasterObserver { @@ -93,6 +94,7 @@ public class RSGroupAdminEndpoint extends RSGroupAdminService public void start(CoprocessorEnvironment env) throws IOException { MasterCoprocessorEnvironment menv = (MasterCoprocessorEnvironment)env; master = menv.getMasterServices(); + // Findbugs: ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD groupInfoManager = new RSGroupInfoManagerImpl(master); groupAdminServer = new RSGroupAdminServer(master, groupInfoManager); Class<?> clazz = diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java index 7aea464..65d03b1 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java @@ -91,6 +91,8 @@ public class RSGroupAdminServer extends RSGroupAdmin { } @Override + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SWL_SLEEP_WITH_LOCK_HELD", + justification="Intentional but FIX!!!!") public void moveServers(Set<HostAndPort> servers, String targetGroupName) throws IOException { if (servers == null) { @@ -216,7 +218,7 @@ public class RSGroupAdminServer extends RSGroupAdmin { } } try { - Thread.sleep(1000); + Thread.sleep(100); // Findbugs: SWL_SLEEP_WITH_LOCK_HELD FIX!!!!
} catch (InterruptedException e) { LOG.warn("Sleep interrupted", e); Thread.currentThread().interrupt(); diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java index 0e7f267..93a33bc 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java @@ -140,7 +140,8 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc Map<ServerName, List<HRegionInfo>> groupClusterState = new HashMap<ServerName, List<HRegionInfo>>(); for (HostAndPort sName : info.getServers()) { - for(ServerName curr: clusterState.keySet()) { + for(Map.Entry<ServerName, List<HRegionInfo>> e: clusterState.entrySet()) { + ServerName curr = e.getKey(); if(curr.getHostPort().equals(sName)) { groupClusterState.put(curr, correctedState.get(curr)); } @@ -166,7 +167,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc ListMultimap<String, HRegionInfo> regionMap = ArrayListMultimap.create(); ListMultimap<String, ServerName> serverMap = ArrayListMultimap.create(); generateGroupMaps(regions, servers, regionMap, serverMap); - for(String groupKey : regionMap.keySet()) { + for (String groupKey: regionMap.keySet()) { if (regionMap.get(groupKey).size() > 0) { Map<ServerName, List<HRegionInfo>> result = this.internalBalancer.roundRobinAssignment( @@ -316,7 +317,8 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc private Set<HRegionInfo> getMisplacedRegions( Map<HRegionInfo, ServerName> regions) throws IOException { Set<HRegionInfo> misplacedRegions = new HashSet<HRegionInfo>(); - for (HRegionInfo region : regions.keySet()) { + for (Map.Entry<HRegionInfo, ServerName> e: regions.entrySet()) { + HRegionInfo region = e.getKey(); ServerName assignedServer = regions.get(region); RSGroupInfo info = RSGroupInfoManager.getRSGroup(RSGroupInfoManager.getRSGroupOfTable(region.getTable())); @@ -339,7 +341,8 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc new TreeMap<ServerName, List<HRegionInfo>>(); List<HRegionInfo> misplacedRegions = new LinkedList<HRegionInfo>(); correctAssignments.put(LoadBalancer.BOGUS_SERVER_NAME, new LinkedList<HRegionInfo>()); - for (ServerName sName : existingAssignments.keySet()) { + for (Map.Entry<ServerName, List<HRegionInfo>> e: existingAssignments.entrySet()) { + ServerName sName = e.getKey(); correctAssignments.put(sName, new LinkedList<HRegionInfo>()); List<HRegionInfo> regions = existingAssignments.get(sName); for (HRegionInfo region : regions) { diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java index 01efefc..c0840ab 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java @@ -25,8 +25,8 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -93,12 +93,15 @@ import org.apache.zookeeper.KeeperException; * It also makes use of zookeeper to store group information needed * for bootstrapping during offline mode. */ +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD", + justification="FIX!!!
TODO!!!") public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListener { private static final Log LOG = LogFactory.getLog(RSGroupInfoManagerImpl.class); /** Table descriptor for hbase:rsgroup catalog table */ private final static HTableDescriptor RSGROUP_TABLE_DESC; static { + // Findbugs: ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD FIX!!! RSGROUP_TABLE_DESC = new HTableDescriptor(RSGROUP_TABLE_NAME_BYTES); RSGROUP_TABLE_DESC.addFamily(new HColumnDescriptor(META_FAMILY_BYTES)); RSGROUP_TABLE_DESC.setRegionSplitPolicyClassName(DisabledRegionSplitPolicy.class.getName()); @@ -123,7 +126,8 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene private RSGroupSerDe rsGroupSerDe; private DefaultServerUpdater defaultServerUpdater; - + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SC_START_IN_CTOR", + justification="FIX!!! TODO!!!") public RSGroupInfoManagerImpl(MasterServices master) throws IOException { this.rsGroupMap = Collections.EMPTY_MAP; this.tableMap = Collections.EMPTY_MAP; @@ -134,7 +138,7 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene rsGroupStartupWorker = new RSGroupStartupWorker(this, master, conn); prevRSGroups = new HashSet(); refresh(); - rsGroupStartupWorker.start(); + rsGroupStartupWorker.start(); // Findbugs: SC_START_IN_CTOR defaultServerUpdater = new DefaultServerUpdater(this); master.getServerManager().registerListener(this); defaultServerUpdater.start(); @@ -562,13 +566,13 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene this.masterServices = masterServices; this.groupInfoManager = groupInfoManager; this.conn = conn; - setName(RSGroupStartupWorker.class.getName()+"-"+masterServices.getServerName()); + setName(RSGroupStartupWorker.class.getName() + "-" + masterServices.getServerName()); setDaemon(true); } @Override public void run() { - if(waitForGroupTableOnline()) { + if (waitForGroupTableOnline()) { LOG.info("GroupBasedLoadBalancer is now online"); } } @@ -586,14 +590,12 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene try { final Table nsTable = conn.getTable(TableName.NAMESPACE_TABLE_NAME); final Table groupTable = conn.getTable(RSGROUP_TABLE_NAME); - boolean rootMetaFound = - masterServices.getMetaTableLocator().verifyMetaRegionLocation( - conn, - masterServices.getZooKeeper(), - 1); + boolean rootMetaFound = masterServices.getMetaTableLocator().verifyMetaRegionLocation( + conn, + masterServices.getZooKeeper(), + 1); final AtomicBoolean nsFound = new AtomicBoolean(false); if (rootMetaFound) { - MetaTableAccessor.Visitor visitor = new DefaultVisitorBase() { @Override public boolean visitInternal(Result row) throws IOException { @@ -654,8 +656,8 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene createSent = true; } LOG.info("Group table: " + RSGROUP_TABLE_NAME + " isOnline: " + found.get() - + ", regionCount: " + foundRegions.size() + ", assignCount: " - + assignedRegions.size() + ", rootMetaFound: "+rootMetaFound); + + ", regionCount: " + foundRegions.size() + ", assignCount: " + + assignedRegions.size() + ", rootMetaFound: "+rootMetaFound); found.set(found.get() && assignedRegions.size() == foundRegions.size() && foundRegions.size() > 0); } else { @@ -669,9 +671,14 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene //flush any inconsistencies between ZK and HTable groupInfoManager.flushConfig(groupInfoManager.rsGroupMap); } - } 
catch(Exception e) { + } catch (IOException e) { found.set(false); LOG.warn("Failed to perform check", e); + } catch (java.lang.InterruptedException ie) { + found.set(false); + // Restore interrupt + Thread.currentThread().interrupt(); + LOG.warn("Failed to perform check", ie); } try { Thread.sleep(100); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java index 5da0df7..869d0f7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java @@ -18,10 +18,10 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java index 5d3cbc2..7d91f3d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest; import org.apache.hadoop.hbase.regionserver.RowProcessor; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * Convenience class that is used to make RowProcessorEndpoint invocations. * For example usage, refer TestRowProcessorEndpoint diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java index cc78626..9a66471 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java @@ -39,11 +39,11 @@ import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRespo import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService; import org.apache.hadoop.hbase.regionserver.InternalScanner; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * A concrete AggregateProtocol implementation. 
Its system level coprocessor diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java index ab5fc78..b3b0c76 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java @@ -34,11 +34,11 @@ import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcesso import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RowProcessor; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * This class demonstrates how to implement atomic read-modify-writes diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java index caf6a14..5afde9f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HBaseInterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java index 1076437..714e1ef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseInterfaceAudience; -import com.google.protobuf.Message; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Coprocessors implement this interface to observe and mediate endpoint invocations diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java index e771a92..87c5bb3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java @@ -41,9 +41,9 @@ import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateR import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse; 
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * This class demonstrates how to implement atomic multi row transactions using diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java index 88db6b6..d5dc3f8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HBaseInterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java index 2224414..200c433 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExc import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage; import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A ForeignException is an exception from another thread or process. 
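The hunks above are the whole migration recipe for endpoint code: the coprocessor contract types (Service, RpcController, RpcCallback, Message) now resolve to the relocated package, so any endpoint maintained outside this tree needs the same one-line import swaps before it compiles against these interfaces. A minimal sketch of an endpoint written against the relocated runtime and the ExampleProtos service regenerated earlier in this patch, assuming the relocated classes are on the compile classpath; SketchRowCountEndpoint is an illustrative name, not a class this patch adds:

import java.io.IOException;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
import org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;

public class SketchRowCountEndpoint extends ExampleProtos.RowCountService
    implements Coprocessor, CoprocessorService {
  @Override
  public Service getService() {
    return this; // the generated RowCountService already implements the relocated Service
  }

  @Override
  public void getRowCount(RpcController controller, ExampleProtos.CountRequest request,
      RpcCallback<ExampleProtos.CountResponse> done) {
    // A real endpoint would scan the region here; a constant keeps the sketch short.
    done.run(ExampleProtos.CountResponse.newBuilder().setCount(0L).build());
  }

  @Override
  public void getKeyValueCount(RpcController controller, ExampleProtos.CountRequest request,
      RpcCallback<ExampleProtos.CountResponse> done) {
    done.run(ExampleProtos.CountResponse.newBuilder().setCount(0L).build());
  }

  @Override
  public void start(CoprocessorEnvironment env) throws IOException {
    // No setup needed for the sketch.
  }

  @Override
  public void stop(CoprocessorEnvironment env) throws IOException {
    // No teardown needed for the sketch.
  }
}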
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java index 9aab924..65fac8c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java @@ -27,6 +27,7 @@ import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHel import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY; import com.google.common.base.Supplier; +import com.google.protobuf.MessageLite; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; @@ -311,10 +312,12 @@ public class FanOutOneBlockAsyncDFSOutput implements AsyncFSOutput { private void setupReceiver(int timeoutMs) { AckHandler ackHandler = new AckHandler(timeoutMs); for (Channel ch : datanodeList) { + MessageLite msg = PipelineAckProto.getDefaultInstance(); ch.pipeline().addLast( new IdleStateHandler(timeoutMs, timeoutMs / 2, 0, TimeUnit.MILLISECONDS), new ProtobufVarint32FrameDecoder(), - new ProtobufDecoder(PipelineAckProto.getDefaultInstance()), ackHandler); + new ProtobufDecoder(msg), + ackHandler); ch.config().setAutoRead(true); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index 0546253..1ba5664 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -19,34 +19,6 @@ package org.apache.hadoop.hbase.io.asyncfs; import static io.netty.handler.timeout.IdleState.READER_IDLE; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Charsets; -import com.google.common.base.Throwables; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.protobuf.ByteString; -import com.google.protobuf.CodedOutputStream; - -import io.netty.buffer.ByteBuf; -import io.netty.buffer.ByteBufOutputStream; -import io.netty.buffer.CompositeByteBuf; -import io.netty.buffer.Unpooled; -import io.netty.channel.Channel; -import io.netty.channel.ChannelDuplexHandler; -import io.netty.channel.ChannelHandlerContext; -import io.netty.channel.ChannelOutboundHandlerAdapter; -import io.netty.channel.ChannelPipeline; -import io.netty.channel.ChannelPromise; -import io.netty.channel.SimpleChannelInboundHandler; -import io.netty.handler.codec.LengthFieldBasedFrameDecoder; -import io.netty.handler.codec.MessageToByteEncoder; -import io.netty.handler.codec.protobuf.ProtobufDecoder; -import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; -import io.netty.handler.timeout.IdleStateEvent; -import io.netty.handler.timeout.IdleStateHandler; -import io.netty.util.concurrent.Promise; - import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.Field; @@ -93,6 +65,34 @@ import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Charsets; 
+import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.protobuf.ByteString; +import com.google.protobuf.CodedOutputStream; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufOutputStream; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.Channel; +import io.netty.channel.ChannelDuplexHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelOutboundHandlerAdapter; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.ChannelPromise; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.MessageToByteEncoder; +import io.netty.handler.codec.protobuf.ProtobufDecoder; +import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; +import io.netty.handler.timeout.IdleStateEvent; +import io.netty.handler.timeout.IdleStateHandler; +import io.netty.util.concurrent.Promise; + /** * Helper class for adding sasl support for {@link FanOutOneBlockAsyncDFSOutput}. */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java index e91699a..0de3679 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java @@ -34,7 +34,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * The request processing logic, which is usually executed in thread pools provided by an diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java index f56bf6f..1482344 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HBaseInterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java index 68e7b65..ea9abd6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; import org.apache.hadoop.hbase.util.ReflectionUtils; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * RPC Executor that uses different queues for reads and writes. 
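One deliberate exception to the relocation sits in the asyncfs hunks just above: FanOutOneBlockAsyncDFSOutput and its SASL helper keep importing the stock com.google.protobuf classes. Netty's ProtobufDecoder is compiled against the unshaded com.google.protobuf.MessageLite, and the HDFS PipelineAckProto it decodes is generated with the unshaded protobuf that Hadoop pulls in transitively, so this call site cannot switch to the relocated types. A condensed sketch of what the hunk arrives at, with the wiring pulled into a standalone method; AckDecoderSketch and install are illustrative names, and ch, timeoutMs and ackHandler stand in for the fields from the surrounding class:

import java.util.concurrent.TimeUnit;

import com.google.protobuf.MessageLite; // stock protobuf on purpose, not the relocated package

import io.netty.channel.Channel;
import io.netty.channel.ChannelHandler;
import io.netty.handler.codec.protobuf.ProtobufDecoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
import io.netty.handler.timeout.IdleStateHandler;

import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto;

final class AckDecoderSketch {
  /** Wires the datanode ack decoder onto a channel, as in the hunk above. */
  static void install(Channel ch, int timeoutMs, ChannelHandler ackHandler) {
    // Widening the prototype to MessageLite makes the unshaded dependency explicit.
    MessageLite ackPrototype = PipelineAckProto.getDefaultInstance();
    ch.pipeline().addLast(
        new IdleStateHandler(timeoutMs, timeoutMs / 2, 0, TimeUnit.MILLISECONDS),
        new ProtobufVarint32FrameDecoder(),
        new ProtobufDecoder(ackPrototype), // Netty's API takes com.google.protobuf.MessageLite
        ackHandler);
  }
}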
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 73226aa..57fe3f6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -135,13 +135,13 @@ import org.apache.htrace.TraceInfo; import org.codehaus.jackson.map.ObjectMapper; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.google.protobuf.BlockingService; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; /** * An RPC server that hosts protobuf described Services. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java index dd7e584..14d3d0e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java @@ -31,10 +31,10 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.authorize.PolicyProvider; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.BlockingService; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX}) @InterfaceStability.Evolving diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index 37e4e44..cc12fbd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -18,7 +18,6 @@ */ package org.apache.hadoop.hbase.mapreduce; -import com.google.protobuf.InvalidProtocolBufferException; import com.codahale.metrics.MetricRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -576,12 +575,7 @@ public class TableMapReduceUtil { public static Scan convertStringToScan(String base64) throws IOException { byte [] decoded = Base64.decode(base64); ClientProtos.Scan scan; - try { - scan = ClientProtos.Scan.parseFrom(decoded); - } catch (InvalidProtocolBufferException ipbe) { - throw 
new IOException(ipbe); - } - + scan = ClientProtos.Scan.parseFrom(decoded); return ProtobufUtil.toScan(scan); } @@ -798,7 +792,7 @@ public class TableMapReduceUtil { // pull necessary dependencies org.apache.zookeeper.ZooKeeper.class, io.netty.channel.Channel.class, - com.google.protobuf.Message.class, + org.apache.hadoop.hbase.shaded.com.google.protobuf.Message.class, com.google.common.collect.Lists.class, org.apache.htrace.Trace.class, com.codahale.metrics.MetricRegistry.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java index a21edcc..081556d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.mob.MobUtils; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * The Class ExpiredMobFileCleanerChore for running cleaner regularly to remove the expired diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 5f5cc38..2a38f04 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.master; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java index 1e6dade..589a3c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.master; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index 8974945..608b687 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -18,13 +18,13 @@ */ package org.apache.hadoop.hbase.master; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import 
com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.net.InetAddress; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java index cfb2023..801e249 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.procedure.MasterProcedureManagerHost; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; import org.apache.hadoop.hbase.quotas.MasterQuotaManager; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Services Master supplies @@ -272,7 +272,7 @@ public interface MasterServices extends Server { * *

* Only a single instance may be registered for a given {@link Service} subclass (the - * instances are keyed on {@link com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. + * instances are keyed on {@link org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. * After the first registration, subsequent calls with the same service name will fail with * a return value of {@code false}. *
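The contract above is worth a concrete illustration: a duplicate registration is signalled through the boolean return, not an exception. A minimal sketch, assuming a hypothetical MyMasterServiceImpl endpoint and a LOG handle; the shaded Service import is the one this patch introduces:

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;

    // Register a coprocessor endpoint with the master. Instances are keyed by the
    // service descriptor's full name, so a second registration under the same name
    // returns false instead of throwing.
    Service endpoint = new MyMasterServiceImpl();  // hypothetical implementation
    if (!masterServices.registerService(endpoint)) {
      LOG.warn("Service " + endpoint.getDescriptorForType().getFullName()
          + " is already registered");
    }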

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java index 612a8d0..990cae0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java @@ -77,8 +77,8 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * The ServerManager class manages info about region servers. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java index c884806..e27e1b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * Helper class for {@link FavoredNodeLoadBalancer} that has all the intelligence diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java index fabd41a..da823b9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.master.normalizer; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java index 3c965cb..bb24473 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * The cleaner to delete the expired MOB files. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java index 8547c8c..ea1707d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java @@ -38,7 +38,7 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * The sweep tool. It deletes the mob files that are not used and merges the small mob files to diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java index 5961645..9ced8e4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.monitoring; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * A MonitoredTask implementation optimized for use with RPC Handlers diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java index a29595b..7ff7db6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * A MonitoredTask implementation designed for use with RPC Handlers diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java index 085d642..c74d085 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java index 2e03a60..d02a9d5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * ZooKeeper based controller for a procedure member. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java index 8cb2237..6455942 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALKey; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @InterfaceAudience.Private public class ReplicationProtbufUtil { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java index 1eacc75..4ab74d1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java @@ -41,8 +41,8 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Message; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; import org.apache.hadoop.hbase.security.User; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java index be2bd91..bafb005 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * Base class for RowProcessor with some default implementations. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 89e723e..4306e35 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -24,14 +24,14 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; import com.google.common.io.Closeables; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.TextFormat; import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index e03993f..e41c6f6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.regionserver; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 9cfc5df..e594243 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -192,11 +192,11 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; import org.apache.zookeeper.KeeperException; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import 
com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; /** * Implements the regionserver RPC services. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java index 9b1f82a..54855c1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.regionserver; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import java.io.IOException; import java.util.Collection; import java.util.List; @@ -551,7 +551,7 @@ public interface Region extends ConfigurationObserver { /** * Executes a single protocol buffer coprocessor endpoint {@link Service} method using * the registered protocol handlers. {@link Service} implementations must be registered via the - * {@link Region#registerService(com.google.protobuf.Service)} + * {@link Region#registerService(org.apache.hadoop.hbase.shaded.com.google.protobuf.Service)} * method before they are available. * * @param controller an {@code RpcContoller} implementation to pass to the invoked service @@ -560,19 +560,19 @@ public interface Region extends ConfigurationObserver { * @return a protocol buffer {@code Message} instance containing the method's result * @throws IOException if no registered service handler is found or an error * occurs during the invocation - * @see org.apache.hadoop.hbase.regionserver.Region#registerService(com.google.protobuf.Service) + * @see org.apache.hadoop.hbase.regionserver.Region#registerService(org.apache.hadoop.hbase.shaded.com.google.protobuf.Service) */ Message execService(RpcController controller, CoprocessorServiceCall call) throws IOException; /** * Registers a new protocol buffer {@link Service} subclass as a coprocessor endpoint to * be available for handling - * {@link Region#execService(com.google.protobuf.RpcController, + * {@link Region#execService(org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController, * org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)}} calls. * *

* Only a single instance may be registered per region for a given {@link Service} subclass (the - * instances are keyed on {@link com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. + * instances are keyed on {@link org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. * After the first registration, subsequent calls with the same service name will fail with * a return value of {@code false}. *
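The same keying applies per region; the usual path into Region#registerService is a region coprocessor handing its Service to RegionCoprocessorHost via CoprocessorService#getService(). A minimal sketch with hypothetical MyProtos names, patterned on the ColumnAggregationEndpoint tests further down in this patch (a real endpoint would also implement Coprocessor start/stop):

    import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;

    // Hypothetical endpoint: extends its generated stub (assumed to declare a single
    // rpc, getCount) and returns itself, so the host registers it under
    // MyProtos.MyRegionService's descriptor full name; a second endpoint exposing the
    // same service on one region is rejected with registerService(...) == false.
    public class MyRegionEndpoint extends MyProtos.MyRegionService
        implements CoprocessorService {
      @Override
      public Service getService() {
        return this;
      }

      @Override
      public void getCount(RpcController controller, MyProtos.CountRequest request,
          RpcCallback<MyProtos.CountResponse> done) {
        done.run(MyProtos.CountResponse.newBuilder().setCount(0L).build());
      }
    }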

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index acfaa96..b065c75 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -31,8 +31,8 @@ import java.util.regex.Matcher; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.protobuf.Message; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.commons.collections.map.AbstractReferenceMap; import org.apache.commons.collections.map.ReferenceMap; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java index 356a88b..5102101 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.wal.WAL; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Services provided by {@link HRegionServer} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java index 34901b7..bf07dd6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; /** * Defines the procedure to atomically perform multiple scans and mutations diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java index 537329a..36db3fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * This manager class handles the work dealing with snapshots for a {@link HRegionServer}. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index 0755358..0835a22 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A Protobuf based WAL has the following structure: diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index 87850aa..8a7e619 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.io.IOUtils; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java index 3eb85bd..d6d0e7c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java index 197144d..aefa728 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALKey; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; /** * Helper methods to ease Region Server integration with the Write Ahead Log (WAL). 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java index b0fd176..63affa6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java @@ -79,7 +79,7 @@ import org.apache.hadoop.util.StringUtils; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.Lists; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * A {@link org.apache.hadoop.hbase.replication.ReplicationEndpoint} endpoint diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 0e69060..46905c2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -127,10 +127,10 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Provides basic authorization checks for data access and administrative diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java index d0d9b63..9b6b5b7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java @@ -99,8 +99,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; /** *

This is a customized version of the polymorphic hadoop diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java index cb143b7..7cd1135 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java @@ -40,9 +40,9 @@ import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBul import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService; import org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Coprocessor service for bulk loads in secure mode. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java index a7e6113..c4162af 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.security.token; import java.io.IOException; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index 68817bc..4ce8834 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.security.PrivilegedExceptionAction; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index f7bcef0..f9d4d3f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -108,10 +108,10 @@ import org.apache.hadoop.hbase.util.Pair; import 
com.google.common.collect.Lists; import com.google.common.collect.MapMaker; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java index 1763b2f..97ec20a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.snapshot; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java index 3bb3575..5fac47c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.snapshot; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index d483c71..082b527 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -24,7 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Ordering; import com.google.common.collect.TreeMultimap; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.FileNotFoundException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java index 3c2203b..549d45a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
* - * The portion of this file denoted by 'Copied from com.google.protobuf.CodedInputStream' + * The portion of this file denoted by 'Copied from org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream' * is from Protocol Buffers v2.4.1 under the following license * * Copyright 2008 Google Inc. All rights reserved. @@ -66,7 +66,7 @@ public abstract class ProtoUtil { * @throws IOException if it is malformed or EOF. */ public static int readRawVarint32(DataInput in) throws IOException { - // Copied from com.google.protobuf.CodedInputStream v2.4.1 readRawVarint32 + // Copied from org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream v2.4.1 readRawVarint32 byte tmp = in.readByte(); if (tmp >= 0) { return tmp; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java index 86fdfbd..f2db78e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; /** * A Key for an entry in the WAL. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java index 3e27834..c5b5110 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.wal; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; import java.io.EOFException; import java.io.FileNotFoundException; diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp index cc13972..ae476ad 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp @@ -19,7 +19,7 @@ --%> <%@ page contentType="text/html;charset=UTF-8" import="static org.apache.commons.lang.StringEscapeUtils.escapeXml" - import="com.google.protobuf.ByteString" + import="org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString" import="java.util.ArrayList" import="java.util.TreeMap" import="java.util.List" diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 6f225d6..6bc11e9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Basic mock region
server services. Should only be instantiated by HBaseTestingUtility.b diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java index 6db201f..23d1a04 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase; -import com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.regionserver.AnnotationReadingPriorityFunction; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java index affa9b3..5330a1f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java @@ -48,8 +48,8 @@ import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Test MetaTableAccessor but without spinning up a cluster. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java index ba6e1d4..b970091 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java @@ -54,8 +54,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Test {@link org.apache.hadoop.hbase.zookeeper.MetaTableLocator} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java index e6c17a5..dab6981 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ByteString; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; @Category({MiscTests.class, SmallTests.class}) public class TestServerLoad { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java index b59a583..df0ce33 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java @@ -70,7 +70,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import 
com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Class to test HBaseAdmin. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java index d088fc4..becc0db 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java @@ -67,7 +67,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java index 515e763..cebfdc0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java @@ -45,8 +45,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * Test the scenario where a HRegionServer#scan() call, while scanning, timeout at client side and diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java index 679d9c9..7099c19 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java @@ -48,11 +48,11 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @Category({MediumTests.class, ClientTests.class}) public class TestClientTimeouts { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java index 3948d18..e034b23 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java @@ -57,8 +57,8 @@ import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @Category({SmallTests.class, ClientTests.class}) public class TestHBaseAdminNoCluster { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java index a2ce0d2..3874bdb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java index 4e4ff5e..a215c78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java @@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java index 49733d4..143c572 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java @@ -37,9 +37,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Test coprocessor endpoint that always returns {@code null} for requests to the last region diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java index aa8ef62..abff0ea 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java @@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import 
org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; /** * Test coprocessor endpoint that always throws a {@link DoNotRetryIOException} for requests on diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java index cdda28a..eec9b6d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java index b743254..22f4df1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java @@ -54,7 +54,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * TestEndpoint: test cases to verify the batch execution of coprocessor Endpoint diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java index 1768a2a..dca9b27 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java @@ -60,8 +60,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; /** * TestEndpoint: test cases to verify coprocessor Endpoint diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java index 7695361..40bc97c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java @@ -42,7 +42,7 
@@
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category({CoprocessorTests.class, MediumTests.class})
 public class TestCoprocessorTableEndpoint {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
index 1484c34..68afebb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
@@ -40,9 +40,9 @@
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 7cae0bc..2dfd780 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -75,7 +75,7 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
index f893555..17e4470 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;

 /**
  * Test that we correctly serialize exceptions from a remote source
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
index a84895f..4a06623 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java
@@ -16,10 +16,8 @@
  */
 package org.apache.hadoop.hbase.http;

-import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
-import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.security.Principal;
@@ -48,8 +46,6 @@ import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.protocol.HttpClientContext;
 import org.apache.http.config.Lookup;
 import org.apache.http.config.RegistryBuilder;
-import org.apache.http.entity.ByteArrayEntity;
-import org.apache.http.entity.ContentType;
 import org.apache.http.impl.auth.SPNegoSchemeFactory;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.impl.client.HttpClients;
@@ -220,7 +216,7 @@ public class TestSpnegoHttpServer extends HttpServerFunctionalTest {
     HttpClientContext context = HttpClientContext.create();
     Lookup authRegistry = RegistryBuilder.create()
-        .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
+        .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true))
         .build();

     HttpClient client = HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry).build();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
index 45cec78..d27bba9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
@@ -28,13 +28,13 @@
 import static org.mockito.internal.verification.VerificationModeFactory.times;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcChannel;
 import java.io.IOException;
 import java.net.ConnectException;
 import java.net.InetAddress;
@@ -435,7 +435,7 @@ public abstract class AbstractTestIPC {
     channel
         .callMethod(md, new PayloadCarryingRpcController(), param, md.getOutputType().toProto(),
-          new com.google.protobuf.RpcCallback() {
+          new org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback() {
             @Override
             public void run(Message parameter) {
               done.set(true);
@@ -472,7 +472,8 @@ public abstract class AbstractTestIPC {
     final AtomicBoolean done = new AtomicBoolean(false);

     PayloadCarryingRpcController controller = new PayloadCarryingRpcController();
-    controller.notifyOnFail(new com.google.protobuf.RpcCallback() {
+    controller.notifyOnFail(
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback() {
       @Override
       public void run(IOException e) {
         done.set(true);
@@ -482,7 +483,7 @@ public abstract class AbstractTestIPC {
     });

     channel.callMethod(md, controller, param, md.getOutputType().toProto(),
-        new com.google.protobuf.RpcCallback() {
+        new org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback() {
           @Override
           public void run(Message parameter) {
             done.set(true);
@@ -529,7 +530,8 @@ public abstract class AbstractTestIPC {
     }
   }

-  private Message setupAsyncConnection(TestRpcServer rpcServer, RpcClient client)
+  private org.apache.hadoop.hbase.shaded.com.google.protobuf.Message
+      setupAsyncConnection(TestRpcServer rpcServer, RpcClient client)
       throws IOException, InterruptedException, ExecutionException,
       java.util.concurrent.TimeoutException {
     InetSocketAddress address = rpcServer.getListenerAddress();
@@ -545,8 +547,10 @@ public abstract class AbstractTestIPC {
     AsyncRpcChannel channel =
         client.createRpcChannel(md.getService().getName(), serverName, User.getCurrent());

-    final Future f = channel
-        .callMethod(md, param, null, md.getOutputType().toProto(), MessageConverter.NO_CONVERTER,
+    org.apache.hadoop.hbase.shaded.com.google.protobuf.Message msg =
+        (org.apache.hadoop.hbase.shaded.com.google.protobuf.Message)md.getOutputType().toProto();
+    final Future f = channel
+        .callMethod(md, param, null, msg, MessageConverter.NO_CONVERTER,
             null, 1000, HConstants.NORMAL_QOS);

     return f.get(1, TimeUnit.SECONDS);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java
index 7efe198..4340970 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.ipc;

-import com.google.protobuf.ByteString;
-import com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
 import io.netty.channel.ChannelHandlerContext;
 import io.netty.channel.ChannelInitializer;
 import io.netty.channel.ChannelOutboundHandlerAdapter;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java
index 82b0341..acc05f3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.ipc;

 import static org.junit.Assert.assertEquals;

-import com.google.protobuf.Descriptors;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors;
 import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java
index 56de07d..5e84666 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java
@@ -22,8 +22,8 @@ import static org.mockito.Matchers.anyInt;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.spy;

-import com.google.protobuf.ByteString;
-import com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
index 81869b4..aa014e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java
@@ -42,10 +42,10 @@ import org.junit.Before;
 import org.junit.After;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Test for testing protocol buffer based RPC mechanism.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java
index a37ba11..476ef2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java
@@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.ipc;

 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 0190027..7635902 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -74,7 +74,7 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
-import com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;

 @Category({RPCTests.class, SmallTests.class})
 public class TestSimpleRpcScheduler {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
index 66d7eb1..065d779 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
@@ -73,8 +73,8 @@ import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;

 import com.google.common.collect.Multimap;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Test cases for the atomic load error handling of the bulk load functionality.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
index 0a86ecb..0103731 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java
@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.mockito.Mockito;

-import com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;

 public class MockNoopMasterServices implements MasterServices, Server {
   private final Configuration conf;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
index 2927023..fa2c2e7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java
@@ -116,9 +116,9 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;

-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * A mock RegionServer implementation.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 35a3a79..d5ccf1e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -83,8 +83,9 @@ import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;

-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category({MasterTests.class, SmallTests.class})
 public class TestCatalogJanitor {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
index 37d6940..f25c65d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
@@ -48,8 +48,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category({ MasterTests.class, MediumTests.class })
 public class TestHMasterRPCException {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
index 6da7a38..d66e89e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java
@@ -69,7 +69,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TestRule;
 import org.mockito.Mockito;

-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Standup the master and fake it to test various aspects of master function.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
index 1cc9eda..6b26ab4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
@@ -69,7 +69,7 @@ import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;

 import com.google.common.collect.Lists;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Test the master-related aspects of a snapshot
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
index 9d171f0..784c60c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
@@ -38,8 +38,8 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;

-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 import java.util.ArrayList;
 import java.util.HashMap;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
index b2d8b38..9421bc9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 /**
  * Class to test ProtobufUtil.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
index 35258f2..258afdf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;

-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * A region server that will OOME.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
index 770c39b..4addbee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java
@@ -45,10 +45,10 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;

-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * An implementation of {@link Table} that sits directly on a Region; it decorates the passed in
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index 1c1a603..23f6aee 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -71,7 +71,7 @@ import org.junit.experimental.categories.Category;

 import com.google.common.collect.Iterators;
 import com.google.common.collect.Sets;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category(LargeTests.class)
 public class TestEndToEndSplitTransaction {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 9aa3a9b..4f2000f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.regionserver;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.protobuf.ByteString;
 import java.io.IOException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
 import java.io.InterruptedIOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
index 5fde726..5d2b812 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
@@ -46,7 +46,7 @@ import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 @Category({RegionServerTests.class, SmallTests.class})
 public class TestHRegionInfo {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index a3804dd..d5e8ee7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -36,7 +36,7 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;

 import com.google.common.collect.Lists;
-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 import java.io.FileNotFoundException;
 import java.io.IOException;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
index 0e60877..5fc065a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java
@@ -47,7 +47,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 /**
  * Tests that verify certain RPCs get a higher QoS.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
index a9115f3..f1f18ce 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
@@ -43,8 +43,8 @@ import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;

 import com.google.common.collect.Lists;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HConstants;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
index f88c7dd..604bd90 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java
@@ -84,8 +84,8 @@ import org.junit.rules.TestName;
 import org.junit.rules.TestRule;

 import com.google.common.base.Joiner;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Like {@link TestRegionMergeTransaction} in that we're testing
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
index 99f5801..41c089e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java
@@ -59,7 +59,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Tests for region replicas. Sad that we cannot isolate these without bringing up a whole
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
index 0ee75a8..2265528 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java
@@ -51,7 +51,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Tests on the region server, without the master.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java
index 54bee94..e00ac3a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java
@@ -65,8 +65,8 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Here we test to make sure that scans return the expected Results when the server is sending the
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index 470c6d1..3565e4f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -63,10 +63,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category({RegionServerTests.class, MediumTests.class})
 public class TestServerCustomProtocol {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 7fbcfea..46f1997 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -107,8 +107,8 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Like TestSplitTransaction in that we're testing {@link SplitTransactionImpl}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
index f77bafe..6661d16 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
@@ -59,8 +59,6 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.RSRpcServices;
 import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.replication.regionserver.TestSourceFSConfigurationProvider;
@@ -75,8 +73,6 @@
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ServiceException;
-
 @Category({ReplicationTests.class, LargeTests.class})
 public class TestMasterReplication {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
index 385b7b0..e6063f5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java
@@ -34,8 +34,8 @@ import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.ThreadLocalRandom;

-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -65,8 +65,8 @@ import org.junit.rules.ExpectedException;
 import org.mockito.Mockito;

 import com.google.common.collect.Lists;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.BlockingService;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;

 import javax.security.sasl.SaslException;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
index 274fe37..293cbf7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
@@ -66,8 +66,8 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;

 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Utility methods for testing security
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index f58e24e..6b4880e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -126,11 +126,11 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Performs authorization checks for common operations, according to different
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
index d5834fd..4d36afa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
@@ -55,7 +55,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;

 import com.google.common.collect.ListMultimap;
-import com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;

 @Category({SecurityTests.class, MediumTests.class})
 public class TestNamespaceCommands extends SecureTestUtil {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java
index 1d7c676..b1d03bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category({ SecurityTests.class, MediumTests.class })
 public class TestGenerateDelegationToken extends SecureTestCluster {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
index 9382bd4..3c8655a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
@@ -85,10 +85,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.BlockingService;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Tests for authentication token creation and usage
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index ab2bacc..d7d9015 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -72,7 +72,7 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 /**
  * Base test class for visibility labels basic features
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
index 06fc7be..be18db9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java
@@ -43,7 +43,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 @Category({SecurityTests.class, MediumTests.class})
 public class TestVisibilityLabelsOpWithDifferentUsersNoACL {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
index f67296d..8b9fe20 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java
@@ -56,7 +56,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 @Category({SecurityTests.class, MediumTests.class})
 public class TestVisibilityLabelsWithACL {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
index 63c08a2..be5ffcc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java
@@ -55,7 +55,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 @Category({SecurityTests.class, MediumTests.class})
 public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibilityLabels {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
index 1410c78..2a41710 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java
@@ -54,7 +54,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 @Category({SecurityTests.class, MediumTests.class})
 public class TestVisibilityLablesWithGroups {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
index 1d027d0..403ff2f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java
@@ -48,7 +48,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;

-import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;

 @Category({SecurityTests.class, LargeTests.class})
 public class TestWithDisabledAuthorization {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
index 38afc3b..51d9daa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
@@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.MD5Hash;
 import org.junit.Assert;

-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 /**
  * Utilities class for snapshots
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
index 835f92e..d889773 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.snapshot;

-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
index bde3e49..4d84762 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
@@ -46,7 +46,7 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;

 import com.google.common.collect.Lists;
-import com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;

 @Category({RegionServerTests.class, MediumTests.class})
 public class TestWALFiltering {
diff --git a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java
index fa66d69..0dec3d3 100644
--- a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java
+++ b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.spark;

-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.Cell;
-- 
2.6.1