From 26891fb029251939344f237ea096d9f59fb35186 Mon Sep 17 00:00:00 2001 From: stack Date: Thu, 21 Jul 2016 12:21:09 -0700 Subject: [PATCH] HBASE-15638 Shade protobuf In here we change all references to protobuf to instead refer to the shaded version; we preface all com.google.protobuf references with org.apache.hbase.shaded. We also purge pb references in a few places changing pb com.google.protobuf.InvalidProtocolBufferException to the lower common denominator IOE since we ultimately are returning a DeserializationException. --- .../org/apache/hadoop/hbase/MetaTableAccessor.java | 2 +- .../hadoop/hbase/client/ClientSmallScanner.java | 2 +- .../hbase/client/ConnectionImplementation.java | 6 +- .../hadoop/hbase/client/FlushRegionCallable.java | 2 +- .../org/apache/hadoop/hbase/client/HBaseAdmin.java | 4 +- .../org/apache/hadoop/hbase/client/HTable.java | 8 +- .../hadoop/hbase/client/MetricsConnection.java | 4 +- .../hadoop/hbase/client/MultiServerCallable.java | 2 +- .../hbase/client/RegionCoprocessorServiceExec.java | 4 +- .../hadoop/hbase/client/RpcRetryingCallerImpl.java | 2 +- .../client/RpcRetryingCallerWithReadReplicas.java | 2 +- .../hadoop/hbase/client/ScannerCallable.java | 4 +- .../hadoop/hbase/client/SecureBulkLoadClient.java | 2 +- .../java/org/apache/hadoop/hbase/client/Table.java | 8 +- .../org/apache/hadoop/hbase/client/TableState.java | 5 +- .../client/coprocessor/AggregationClient.java | 4 +- .../client/replication/ReplicationSerDeHelper.java | 2 +- .../hbase/coprocessor/ColumnInterpreter.java | 2 +- .../hadoop/hbase/filter/BinaryComparator.java | 7 +- .../hbase/filter/BinaryPrefixComparator.java | 5 +- .../apache/hadoop/hbase/filter/BitComparator.java | 5 +- .../hadoop/hbase/filter/ColumnCountGetFilter.java | 3 +- .../hbase/filter/ColumnPaginationFilter.java | 3 +- .../hadoop/hbase/filter/ColumnPrefixFilter.java | 3 +- .../hadoop/hbase/filter/ColumnRangeFilter.java | 3 +- .../hadoop/hbase/filter/DependentColumnFilter.java | 3 +- 
.../apache/hadoop/hbase/filter/FamilyFilter.java | 4 +- .../org/apache/hadoop/hbase/filter/FilterList.java | 4 +- .../apache/hadoop/hbase/filter/FilterWrapper.java | 4 +- .../hadoop/hbase/filter/FirstKeyOnlyFilter.java | 3 +- .../FirstKeyValueMatchingQualifiersFilter.java | 4 +- .../apache/hadoop/hbase/filter/FuzzyRowFilter.java | 4 +- .../hadoop/hbase/filter/InclusiveStopFilter.java | 4 +- .../apache/hadoop/hbase/filter/KeyOnlyFilter.java | 3 +- .../apache/hadoop/hbase/filter/LongComparator.java | 5 +- .../hadoop/hbase/filter/MultiRowRangeFilter.java | 4 +- .../hbase/filter/MultipleColumnPrefixFilter.java | 4 +- .../apache/hadoop/hbase/filter/NullComparator.java | 5 +- .../org/apache/hadoop/hbase/filter/PageFilter.java | 3 +- .../apache/hadoop/hbase/filter/PrefixFilter.java | 4 +- .../hadoop/hbase/filter/QualifierFilter.java | 4 +- .../hadoop/hbase/filter/RandomRowFilter.java | 5 +- .../hadoop/hbase/filter/RegexStringComparator.java | 5 +- .../org/apache/hadoop/hbase/filter/RowFilter.java | 4 +- .../filter/SingleColumnValueExcludeFilter.java | 4 +- .../hbase/filter/SingleColumnValueFilter.java | 3 +- .../org/apache/hadoop/hbase/filter/SkipFilter.java | 4 +- .../hadoop/hbase/filter/SubstringComparator.java | 5 +- .../hadoop/hbase/filter/TimestampsFilter.java | 3 +- .../apache/hadoop/hbase/filter/ValueFilter.java | 4 +- .../hadoop/hbase/filter/WhileMatchFilter.java | 4 +- .../apache/hadoop/hbase/ipc/AbstractRpcClient.java | 10 +- .../org/apache/hadoop/hbase/ipc/AsyncCall.java | 4 +- .../apache/hadoop/hbase/ipc/AsyncRpcChannel.java | 4 +- .../hadoop/hbase/ipc/AsyncRpcChannelImpl.java | 4 +- .../apache/hadoop/hbase/ipc/AsyncRpcClient.java | 10 +- .../hbase/ipc/AsyncServerResponseHandler.java | 2 +- .../hadoop/hbase/ipc/BlockingRpcCallback.java | 2 +- .../java/org/apache/hadoop/hbase/ipc/Call.java | 6 +- .../hadoop/hbase/ipc/CoprocessorRpcChannel.java | 6 +- .../hadoop/hbase/ipc/CoprocessorRpcUtils.java | 10 +- .../java/org/apache/hadoop/hbase/ipc/IPCUtil.java | 4 +- 
.../hbase/ipc/MasterCoprocessorRpcChannel.java | 6 +- .../apache/hadoop/hbase/ipc/MessageConverter.java | 4 +- .../hbase/ipc/RegionCoprocessorRpcChannel.java | 6 +- .../ipc/RegionServerCoprocessorRpcChannel.java | 6 +- .../org/apache/hadoop/hbase/ipc/RpcClient.java | 6 +- .../org/apache/hadoop/hbase/ipc/RpcClientImpl.java | 12 +- .../hadoop/hbase/ipc/ServerRpcController.java | 4 +- .../hbase/ipc/SyncCoprocessorRpcChannel.java | 10 +- .../hadoop/hbase/ipc/TimeLimitedRpcController.java | 4 +- .../hadoop/hbase/protobuf/RequestConverter.java | 2 +- .../hadoop/hbase/protobuf/ResponseConverter.java | 4 +- .../token/AuthenticationTokenIdentifier.java | 2 +- .../security/visibility/VisibilityClient.java | 2 +- .../hbase/zookeeper/MasterAddressTracker.java | 4 +- .../hadoop/hbase/zookeeper/MetaTableLocator.java | 3 +- .../org/apache/hadoop/hbase/zookeeper/ZKUtil.java | 2 +- .../hadoop/hbase/client/TestClientNoCluster.java | 6 +- .../hadoop/hbase/client/TestMetricsConnection.java | 2 +- .../hadoop/hbase/client/TestSnapshotFromAdmin.java | 2 +- .../hbase/exceptions/TestClientExceptionsUtil.java | 3 +- .../java/org/apache/hadoop/hbase/ServerName.java | 4 +- .../hbase/exceptions/DeserializationException.java | 5 +- .../java/org/apache/hadoop/hbase/types/PBType.java | 6 +- .../java/org/apache/hadoop/hbase/util/Bytes.java | 2 +- .../coprocessor/example/BulkDeleteEndpoint.java | 6 +- .../coprocessor/example/RowCountEndpoint.java | 6 +- .../example/generated/BulkDeleteProtos.java | 340 ++++++++++----------- .../example/generated/ExampleProtos.java | 336 ++++++++++---------- .../java/org/apache/hadoop/hbase/types/PBCell.java | 4 +- .../hadoop/hbase/ipc/IntegrationTestRpcClient.java | 10 +- .../apache/hadoop/hbase/procedure2/Procedure.java | 2 +- .../hbase/procedure2/RemoteProcedureException.java | 2 +- .../procedure2/store/wal/ProcedureWALFormat.java | 2 +- .../store/wal/ProcedureWALFormatReader.java | 2 +- hbase-protocol/pom.xml | 52 ++++ 
.../hadoop/hbase/rest/client/RemoteHTable.java | 8 +- .../hadoop/hbase/rest/model/ScannerModel.java | 2 +- .../hadoop/hbase/rsgroup/RSGroupAdminClient.java | 2 +- .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java | 6 +- .../hbase/rsgroup/RSGroupInfoManagerImpl.java | 2 +- .../apache/hadoop/hbase/client/HTableWrapper.java | 8 +- .../client/coprocessor/RowProcessorClient.java | 2 +- .../hbase/coprocessor/AggregateImplementation.java | 10 +- .../coprocessor/BaseRowProcessorEndpoint.java | 10 +- .../hbase/coprocessor/CoprocessorService.java | 2 +- .../hadoop/hbase/coprocessor/EndpointObserver.java | 4 +- .../coprocessor/MultiRowMutationEndpoint.java | 6 +- .../coprocessor/SingletonCoprocessorService.java | 2 +- .../hbase/errorhandling/ForeignException.java | 2 +- .../FanOutOneBlockAsyncDFSOutputHelper.java | 2 +- .../FanOutOneBlockAsyncDFSOutputSaslHelper.java | 33 +- .../org/apache/hadoop/hbase/ipc/CallRunner.java | 2 +- .../apache/hadoop/hbase/ipc/PriorityFunction.java | 2 +- .../hadoop/hbase/ipc/RWQueueRpcExecutor.java | 2 +- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 14 +- .../hadoop/hbase/ipc/RpcServerInterface.java | 8 +- .../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 10 +- .../hbase/master/ExpiredMobFileCleanerChore.java | 2 +- .../org/apache/hadoop/hbase/master/HMaster.java | 4 +- .../MasterAnnotationReadingPriorityFunction.java | 2 +- .../hadoop/hbase/master/MasterRpcServices.java | 14 +- .../apache/hadoop/hbase/master/MasterServices.java | 4 +- .../apache/hadoop/hbase/master/ServerManager.java | 4 +- .../balancer/FavoredNodeAssignmentHelper.java | 2 +- .../master/normalizer/SimpleRegionNormalizer.java | 2 +- .../hadoop/hbase/mob/ExpiredMobFileCleaner.java | 2 +- .../apache/hadoop/hbase/mob/mapreduce/Sweeper.java | 2 +- .../hbase/monitoring/MonitoredRPCHandler.java | 2 +- .../hbase/monitoring/MonitoredRPCHandlerImpl.java | 2 +- .../procedure/ZKProcedureCoordinatorRpcs.java | 2 +- .../hbase/procedure/ZKProcedureMemberRpcs.java | 2 +- 
.../hbase/protobuf/ReplicationProtbufUtil.java | 2 +- .../AnnotationReadingPriorityFunction.java | 4 +- .../hbase/regionserver/BaseRowProcessor.java | 2 +- .../apache/hadoop/hbase/regionserver/HRegion.java | 14 +- .../hadoop/hbase/regionserver/HRegionServer.java | 14 +- .../hadoop/hbase/regionserver/RSRpcServices.java | 10 +- .../apache/hadoop/hbase/regionserver/Region.java | 14 +- .../hbase/regionserver/RegionCoprocessorHost.java | 4 +- .../hbase/regionserver/RegionServerServices.java | 2 +- .../hadoop/hbase/regionserver/RowProcessor.java | 2 +- .../snapshot/RegionServerSnapshotManager.java | 2 +- .../hbase/regionserver/wal/ProtobufLogReader.java | 4 +- .../hbase/regionserver/wal/WALCellCodec.java | 2 +- .../hbase/regionserver/wal/WALEditsReplaySink.java | 2 +- .../hadoop/hbase/regionserver/wal/WALUtil.java | 2 +- .../RegionReplicaReplicationEndpoint.java | 2 +- .../hbase/security/access/AccessController.java | 8 +- .../access/HbaseObjectWritableFor96Migration.java | 4 +- .../security/access/SecureBulkLoadEndpoint.java | 6 +- .../hadoop/hbase/security/token/TokenProvider.java | 6 +- .../hadoop/hbase/security/token/TokenUtil.java | 2 +- .../security/visibility/VisibilityController.java | 8 +- .../hadoop/hbase/snapshot/SnapshotManifest.java | 4 +- .../hadoop/hbase/snapshot/SnapshotManifestV2.java | 2 +- .../org/apache/hadoop/hbase/util/HBaseFsck.java | 2 +- .../org/apache/hadoop/hbase/util/ProtoUtil.java | 4 +- .../java/org/apache/hadoop/hbase/wal/WALKey.java | 2 +- .../org/apache/hadoop/hbase/wal/WALSplitter.java | 4 +- .../main/resources/hbase-webapps/master/table.jsp | 2 +- .../hadoop/hbase/MockRegionServerServices.java | 2 +- .../org/apache/hadoop/hbase/QosTestHelper.java | 2 +- .../hbase/TestMetaTableAccessorNoCluster.java | 4 +- .../apache/hadoop/hbase/TestMetaTableLocator.java | 4 +- .../org/apache/hadoop/hbase/TestServerLoad.java | 2 +- .../org/apache/hadoop/hbase/client/TestAdmin1.java | 2 +- .../org/apache/hadoop/hbase/client/TestAdmin2.java | 2 +- 
.../hbase/client/TestClientScannerRPCTimeout.java | 4 +- .../hadoop/hbase/client/TestClientTimeouts.java | 10 +- .../hbase/client/TestHBaseAdminNoCluster.java | 4 +- .../apache/hadoop/hbase/client/TestMetaCache.java | 4 +- .../coprocessor/ColumnAggregationEndpoint.java | 6 +- .../ColumnAggregationEndpointNullResponse.java | 6 +- .../ColumnAggregationEndpointWithErrors.java | 6 +- .../coprocessor/ProtobufCoprocessorService.java | 6 +- .../coprocessor/TestBatchCoprocessorEndpoint.java | 2 +- .../hbase/coprocessor/TestCoprocessorEndpoint.java | 4 +- .../coprocessor/TestCoprocessorTableEndpoint.java | 2 +- .../TestRegionServerCoprocessorEndpoint.java | 6 +- .../coprocessor/TestRowProcessorEndpoint.java | 2 +- .../TestForeignExceptionSerialization.java | 2 +- .../apache/hadoop/hbase/ipc/AbstractTestIPC.java | 30 +- .../org/apache/hadoop/hbase/ipc/TestAsyncIPC.java | 4 +- .../hadoop/hbase/ipc/TestCoprocessorRpcUtils.java | 2 +- .../java/org/apache/hadoop/hbase/ipc/TestIPC.java | 4 +- .../apache/hadoop/hbase/ipc/TestProtoBufRpc.java | 8 +- .../hadoop/hbase/ipc/TestRpcHandlerException.java | 10 +- .../hadoop/hbase/ipc/TestSimpleRpcScheduler.java | 2 +- .../TestLoadIncrementalHFilesSplitRecovery.java | 4 +- .../hbase/master/MockNoopMasterServices.java | 2 +- .../hadoop/hbase/master/MockRegionServer.java | 6 +- .../hadoop/hbase/master/TestCatalogJanitor.java | 5 +- .../hbase/master/TestHMasterRPCException.java | 4 +- .../hadoop/hbase/master/TestMasterNoCluster.java | 2 +- .../master/cleaner/TestSnapshotFromMaster.java | 2 +- .../normalizer/TestSimpleRegionNormalizer.java | 4 +- .../hadoop/hbase/protobuf/TestProtobufUtil.java | 2 +- .../hbase/regionserver/OOMERegionServer.java | 2 +- .../hadoop/hbase/regionserver/RegionAsTable.java | 8 +- .../regionserver/TestEndToEndSplitTransaction.java | 2 +- .../hadoop/hbase/regionserver/TestHRegion.java | 2 +- .../hadoop/hbase/regionserver/TestHRegionInfo.java | 2 +- .../regionserver/TestHRegionReplayEvents.java | 2 +- 
.../hadoop/hbase/regionserver/TestPriorityRpc.java | 2 +- .../hbase/regionserver/TestRSStatusServlet.java | 4 +- .../TestRegionMergeTransactionOnCluster.java | 4 +- .../hbase/regionserver/TestRegionReplicas.java | 2 +- .../regionserver/TestRegionServerNoMaster.java | 2 +- .../regionserver/TestScannerHeartbeatMessages.java | 4 +- .../regionserver/TestServerCustomProtocol.java | 8 +- .../TestSplitTransactionOnCluster.java | 4 +- .../hbase/replication/TestMasterReplication.java | 4 - .../hbase/security/AbstractTestSecureIPC.java | 8 +- .../hbase/security/access/SecureTestUtil.java | 4 +- .../security/access/TestAccessController.java | 10 +- .../security/access/TestNamespaceCommands.java | 2 +- .../token/TestGenerateDelegationToken.java | 2 +- .../security/token/TestTokenAuthentication.java | 8 +- .../security/visibility/TestVisibilityLabels.java | 2 +- ...tVisibilityLabelsOpWithDifferentUsersNoACL.java | 2 +- .../visibility/TestVisibilityLabelsWithACL.java | 2 +- ...VisibilityLabelsWithDefaultVisLabelService.java | 2 +- .../visibility/TestVisibilityLablesWithGroups.java | 2 +- .../visibility/TestWithDisabledAuthorization.java | 2 +- .../hbase/snapshot/SnapshotTestingUtils.java | 2 +- .../hbase/snapshot/TestSnapshotManifest.java | 2 +- .../apache/hadoop/hbase/wal/TestWALFiltering.java | 2 +- .../hadoop/hbase/spark/SparkSQLPushDownFilter.java | 4 +- 230 files changed, 883 insertions(+), 847 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index 3d40c70..118ee08 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -35,7 +35,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ServiceException; +import 
org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java index f9bdd55..4a0df33 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientSmallScanner.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index 1fe29c8..d860173 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -21,9 +21,9 @@ package org.apache.hadoop.hbase.client; import static org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.IOException; diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java index 73bdb74..cee2779 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRespons import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * A Callable for flushRegion() RPC. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index ac98bdb..e97e675 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index fbd9f51..c2ad580 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -19,10 +19,10 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors; -import 
com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java index 4fa20e6..464c027 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; import com.codahale.metrics.Counter; import com.codahale.metrics.Histogram; import com.codahale.metrics.MetricRegistry; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java index e764ceb..5ca0dde 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ServiceException; +import 
org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Callable that handles the multi method call going against a single diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java index ad1d2a1..1101020 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java @@ -23,8 +23,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Objects; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java index cc2f159..54e8193 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerImpl.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.ipc.RemoteException; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Runs an rpc'ing {@link RetryingCallable}. 
Sets into rpc client diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java index 65dbb10..b11f495 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCallerWithReadReplicas.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java index 72d69ec..a81306e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java @@ -52,8 +52,8 @@ import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.DNS; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; /** * Scanner operations such as create, next, etc. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java index 7b1547d..835590a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SecureBulkLoadClient.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.security.SecureBulkLoadUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.token.Token; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Client proxy for SecureBulkLoadProtocol diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java index f2cec97..9c57c84 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java @@ -32,10 +32,10 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Used to communicate with a single HBase table. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java index 5d4ac8e..e5b1624 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java @@ -17,7 +17,8 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.InvalidProtocolBufferException; +import java.io.IOException; + import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -170,7 +171,7 @@ public class TableState { throws DeserializationException { try { return convert(tableName, HBaseProtos.TableState.parseFrom(bytes)); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java index 594a459..feb4c8b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java @@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateServi import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * This client class is for invoking the aggregate functions deployed on the diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java index 9682f89..fb1dcff 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.client.replication; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.TableName; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java index e22bd24..305c306 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * Defines how value for specific column is interpreted and provides utility diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java index 3cbb7b9..2bbcd3d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -28,8 +29,6 @@ 
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A binary comparator which lexicographically compares against the specified * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}. @@ -77,7 +76,7 @@ public class BinaryComparator extends ByteArrayComparable { ComparatorProtos.BinaryComparator proto; try { proto = ComparatorProtos.BinaryComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new BinaryComparator(proto.getComparable().getValue().toByteArray()); @@ -94,4 +93,4 @@ public class BinaryComparator extends ByteArrayComparable { return super.areSerializedFieldsEqual(other); } -} +} \ No newline at end of file diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java index a26edbc..d33113d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -28,8 +29,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A comparator which compares against a specified byte array, but only compares * up to the length of this byte array. 
For the rest it is similar to @@ -82,7 +81,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable { ComparatorProtos.BinaryPrefixComparator proto; try { proto = ComparatorProtos.BinaryPrefixComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new BinaryPrefixComparator(proto.getComparable().getValue().toByteArray()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java index db51df7..2ecf4af 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -26,8 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A bit comparator which performs the specified bitwise operation on each of the bytes * with the specified byte array. Then returns whether the result is non-zero. 
@@ -90,7 +89,7 @@ public class BitComparator extends ByteArrayComparable { ComparatorProtos.BitComparator proto; try { proto = ComparatorProtos.BitComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } BitwiseOp bitwiseOp = BitwiseOp.valueOf(proto.getBitwiseOp().name()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index fd65130..6cc2927 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Simple filter that returns first N columns on row only. 
@@ -102,7 +101,7 @@ public class ColumnCountGetFilter extends FilterBase { FilterProtos.ColumnCountGetFilter proto; try { proto = FilterProtos.ColumnCountGetFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new ColumnCountGetFilter(proto.getLimit()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index e5ec412..1d6aa50 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter, based on the ColumnCountGetFilter, takes two arguments: limit and offset. 
@@ -188,7 +187,7 @@ public class ColumnPaginationFilter extends FilterBase { FilterProtos.ColumnPaginationFilter proto; try { proto = FilterProtos.ColumnPaginationFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } if (proto.hasColumnOffset()) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index ff6e8e2..32c2a5e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * This filter is used for selecting only those keys with columns that matches @@ -127,7 +126,7 @@ public class ColumnPrefixFilter extends FilterBase { FilterProtos.ColumnPrefixFilter proto; try { proto = FilterProtos.ColumnPrefixFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new ColumnPrefixFilter(proto.getPrefix().toByteArray()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index 04682c5..f0aa531 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * This filter is 
used for selecting only those keys with columns that are @@ -193,7 +192,7 @@ public class ColumnRangeFilter extends FilterBase { FilterProtos.ColumnRangeFilter proto; try { proto = FilterProtos.ColumnRangeFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new ColumnRangeFilter(proto.hasMinColumn()?proto.getMinColumn().toByteArray():null, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index f7c6f26..6ad252d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter for adding inter-column timestamp matching @@ -241,7 +240,7 @@ public class DependentColumnFilter extends CompareFilter { FilterProtos.DependentColumnFilter proto; try { proto = FilterProtos.DependentColumnFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index b3f9a1a..34ee584 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** *

* This filter is used to filter based on the column family. It takes an @@ -99,7 +97,7 @@ public class FamilyFilter extends CompareFilter { FilterProtos.FamilyFilter proto; try { proto = FilterProtos.FamilyFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index da7a084..28b1ef5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -32,8 +32,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * Implementation of {@link Filter} that represents an ordered List of Filters * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL} @@ -399,7 +397,7 @@ final public class FilterList extends Filter { FilterProtos.FilterList proto; try { proto = FilterProtos.FilterList.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java index 4d7a18a..afce5c7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java @@ -28,8 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This is a Filter wrapper class which is used in the server side. Some filter * related hooks can be defined in this wrapper. The only way to create a @@ -70,7 +68,7 @@ final public class FilterWrapper extends Filter { FilterProtos.FilterWrapper proto; try { proto = FilterProtos.FilterWrapper.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } try { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index 80a1deb..8f2880a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter that will only return the first KV from each row. @@ -101,7 +100,7 @@ public class FirstKeyOnlyFilter extends FilterBase { // There is nothing to deserialize. Why do this at all? try { FilterProtos.FirstKeyOnlyFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } // Just return a new instance. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index 2e9510f..5b93c97 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -30,8 +30,8 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * The filter looks for the given columns in KeyValue. Once there is a match for diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 54402ef..6e4a5ed 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; @@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.util.UnsafeAccess; import org.apache.hadoop.hbase.util.UnsafeAvailChecker; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.InvalidProtocolBufferException; /** * This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key. 
@@ -274,7 +274,7 @@ public class FuzzyRowFilter extends FilterBase { FilterProtos.FuzzyRowFilter proto; try { proto = FilterProtos.FuzzyRowFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } int count = proto.getFuzzyKeysDataCount(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 1096f5e..733489c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; @@ -31,7 +32,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A Filter that stops after the given row. 
There is no "RowStopFilter" because @@ -98,7 +98,7 @@ public class InclusiveStopFilter extends FilterBase { FilterProtos.InclusiveStopFilter proto; try { proto = FilterProtos.InclusiveStopFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new InclusiveStopFilter(proto.hasStopRowKey()?proto.getStopRowKey().toByteArray():null); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index 2fd5aba..6aec5bb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * A filter that will only return the key component of each KV (the value will @@ -112,7 +111,7 @@ public class KeyOnlyFilter extends FilterBase { FilterProtos.KeyOnlyFilter proto; try { proto = FilterProtos.KeyOnlyFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new KeyOnlyFilter(proto.getLenAsVal()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java index 9c56772..c91fccc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java @@ -18,10 +18,9 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; -import com.google.protobuf.InvalidProtocolBufferException; - import 
org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -76,7 +75,7 @@ public class LongComparator extends ByteArrayComparable { ComparatorProtos.LongComparator proto; try { proto = ComparatorProtos.LongComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new LongComparator(Bytes.toLong(proto.getComparable().getValue().toByteArray())); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index 5f9c833..e5e8194 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * Filter to support scan multiple row key ranges. It can construct the row key ranges from the * passed list which can be accessed by each region server. 
@@ -172,7 +170,7 @@ public class MultiRowRangeFilter extends FilterBase { FilterProtos.MultiRowRangeFilter proto; try { proto = FilterProtos.MultiRowRangeFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } int length = proto.getRowRangeListCount(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index 0c14649..15be3ea 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -31,8 +31,6 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used for selecting only those keys with columns that matches * a particular prefix. 
For example, if prefix is 'an', it will pass keys will @@ -134,7 +132,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { FilterProtos.MultipleColumnPrefixFilter proto; try { proto = FilterProtos.MultipleColumnPrefixFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } int numPrefixes = proto.getSortedPrefixesCount(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java index 160232f..b82cf9b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -26,8 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A binary comparator which lexicographically compares against the specified * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}. @@ -86,7 +85,7 @@ public class NullComparator extends ByteArrayComparable { try { // Just parse. Don't use what we parse since on end we are returning new NullComparator. 
ComparatorProtos.NullComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new NullComparator(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index adc9c54..4a95dd7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Implementation of Filter interface that limits results to a specific page * size. It terminates scanning once the number of filter-passed rows is > @@ -112,7 +111,7 @@ public class PageFilter extends FilterBase { FilterProtos.PageFilter proto; try { proto = FilterProtos.PageFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new PageFilter(proto.getPageSize()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index d09ea2c..595bfb3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.ByteBufferedCell; @@ -32,7 +33,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import 
com.google.protobuf.InvalidProtocolBufferException; /** * Pass results that have same row prefix. @@ -122,7 +122,7 @@ public class PrefixFilter extends FilterBase { FilterProtos.PrefixFilter proto; try { proto = FilterProtos.PrefixFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new PrefixFilter(proto.hasPrefix()?proto.getPrefix().toByteArray():null); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index 3aa3558..d137efa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used to filter based on the column qualifier. 
It takes an * operator (equal, greater, not equal, etc) and a byte [] comparator for the @@ -98,7 +96,7 @@ public class QualifierFilter extends CompareFilter { FilterProtos.QualifierFilter proto; try { proto = FilterProtos.QualifierFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index decdc78..176015b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.Random; import org.apache.hadoop.hbase.Cell; @@ -27,8 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A filter that includes rows based on a chance. 
* @@ -129,7 +128,7 @@ public class RandomRowFilter extends FilterBase { FilterProtos.RandomRowFilter proto; try { proto = FilterProtos.RandomRowFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new RandomRowFilter(proto.getChance()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java index 70dd1f9..0b0b9b5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.util.Arrays; @@ -38,8 +39,6 @@ import org.joni.Option; import org.joni.Regex; import org.joni.Syntax; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This comparator is for use with {@link CompareFilter} implementations, such * as {@link RowFilter}, {@link QualifierFilter}, and {@link ValueFilter}, for @@ -171,7 +170,7 @@ public class RegexStringComparator extends ByteArrayComparable { ComparatorProtos.RegexStringComparator proto; try { proto = ComparatorProtos.RegexStringComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } RegexStringComparator comparator; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 559eff8..9754a7d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -29,8 +29,6 @@ import 
org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used to filter based on the key. It takes an operator * (equal, greater, not equal, etc) and a byte [] comparator for the row, @@ -115,7 +113,7 @@ public class RowFilter extends CompareFilter { FilterProtos.RowFilter proto; try { proto = FilterProtos.RowFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index d030fd2..d712e1cf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -33,8 +33,6 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A {@link Filter} that checks a single column value, but does not emit the * tested column. 
This will enable a performance boost over @@ -149,7 +147,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { FilterProtos.SingleColumnValueExcludeFilter proto; try { proto = FilterProtos.SingleColumnValueExcludeFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index df4e482..5d42e8d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * This filter is used to filter cells based on value. 
It takes a {@link CompareFilter.CompareOp} @@ -337,7 +336,7 @@ public class SingleColumnValueFilter extends FilterBase { FilterProtos.SingleColumnValueFilter proto; try { proto = FilterProtos.SingleColumnValueFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index 3aced13..a60411b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -28,8 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A wrapper filter that filters an entire row if any of the Cell checks do * not pass. 
@@ -122,7 +120,7 @@ public class SkipFilter extends FilterBase { FilterProtos.SkipFilter proto; try { proto = FilterProtos.SkipFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } try { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java index d36b158..ef8b0ef 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.filter; +import java.io.IOException; import java.util.Locale; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -25,8 +26,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This comparator is for use with SingleColumnValueFilter, for filtering based on @@ -90,7 +89,7 @@ public class SubstringComparator extends ByteArrayComparable { ComparatorProtos.SubstringComparator proto; try { proto = ComparatorProtos.SubstringComparator.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new SubstringComparator(proto.getSubstr()); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index f0e5afe..03c39f0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ 
-30,7 +30,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import com.google.common.base.Preconditions; -import com.google.protobuf.InvalidProtocolBufferException; /** * Filter that returns only cells whose timestamp (version) is @@ -187,7 +186,7 @@ public class TimestampsFilter extends FilterBase { FilterProtos.TimestampsFilter proto; try { proto = FilterProtos.TimestampsFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } return new TimestampsFilter(proto.getTimestampsList(), diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 2f679f0..ba2cc53 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -29,8 +29,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * This filter is used to filter based on column value. 
It takes an * operator (equal, greater, not equal, etc) and a byte [] comparator for the @@ -95,7 +93,7 @@ public class ValueFilter extends CompareFilter { FilterProtos.ValueFilter proto; try { proto = FilterProtos.ValueFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } final CompareOp valueCompareOp = diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index e75ca49..18a2625 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -28,8 +28,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; -import com.google.protobuf.InvalidProtocolBufferException; - /** * A wrapper filter that returns true from {@link #filterAllRemaining()} as soon * as the wrapped filters {@link Filter#filterRowKey(byte[], int, int)}, @@ -124,7 +122,7 @@ public class WhileMatchFilter extends FilterBase { FilterProtos.WhileMatchFilter proto; try { proto = FilterProtos.WhileMatchFilter.parseFrom(pbBytes); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } try { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java index 3d3339a..fdd1d67 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.annotations.VisibleForTesting; -import 
com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.net.ConnectException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java index 89e6ca4..6be9dd3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncCall.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java index 8cc730f..c5716e8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; import io.netty.util.concurrent.EventExecutor; diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java index 6b7dc5b..d91c006 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; import io.netty.bootstrap.Bootstrap; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufOutputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java index 723a234..99d677c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcClient.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelInitializer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java index 6fcca34..1975ca8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncServerResponseHandler.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufInputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java index 0475e58..92a5b92 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcCallback.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java index 73bc0e2..fb0028e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/Call.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; import java.io.IOException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.MetricsConnection; @@ -128,4 +128,4 @@ public class Call { public long getStartTime() { return 
this.callStats.getStartTime(); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java index 45c3700..aae3421 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; @@ -31,4 +31,4 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public @InterfaceStability.Evolving public interface CoprocessorRpcChannel extends RpcChannel, BlockingRpcChannel { -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java index 63ff3e8..e56f921 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos; import org.apache.hadoop.hbase.util.ByteStringer; @@ -36,7 +36,8 @@ public final class CoprocessorRpcUtils { */ 
private static final String hbaseServicePackage; static { - Descriptors.ServiceDescriptor clientService = ClientProtos.ClientService.getDescriptor(); + org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor clientService = + ClientProtos.ClientService.getDescriptor(); hbaseServicePackage = clientService.getFullName() .substring(0, clientService.getFullName().lastIndexOf(clientService.getName())); } @@ -62,7 +63,8 @@ public final class CoprocessorRpcUtils { * Returns a service call instance for the given coprocessor request. */ public static ClientProtos.CoprocessorServiceCall buildServiceCall(byte[] row, - Descriptors.MethodDescriptor method, Message request) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, + org.apache.hbase.shaded.com.google.protobuf.Message request) { return ClientProtos.CoprocessorServiceCall.newBuilder() .setRow(ByteStringer.wrap(row)) .setServiceName(CoprocessorRpcUtils.getServiceName(method.getService())) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 74f934c..5aa2529 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.base.Preconditions; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream; +import org.apache.hbase.shaded.com.google.protobuf.Message; import java.io.IOException; import java.io.OutputStream; import java.nio.BufferOverflowException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java index 6fae5cb..2235f64 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java index a85225a..08129a5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MessageConverter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; import java.io.IOException; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -38,7 +38,7 @@ public interface MessageConverter { */ O convert(M msg, CellScanner cellScanner) throws IOException; - MessageConverter NO_CONVERTER = new MessageConverter() { + MessageConverter NO_CONVERTER = new MessageConverter() { @Override public Message convert(Message msg, CellScanner cellScanner) throws IOException { return msg; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java index 55d6375..3ecaa7c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionCoprocessorRpcChannel.java @@ 
-17,9 +17,9 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java index c23d36c..801d291 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java @@ -11,9 +11,9 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java index 9d05c21..1414740 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcChannel; import io.netty.util.concurrent.EventExecutor; import java.io.Closeable; import java.io.IOException; @@ -122,4 +122,4 @@ import 
org.apache.hadoop.hbase.security.User; * @return EventLoop */ EventExecutor getEventExecutor(); -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java index dc05af1..50c85aa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.Message.Builder; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message.Builder; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; import io.netty.util.concurrent.EventExecutor; import java.io.BufferedInputStream; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java index b899eb8..aa5a16d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerRpcController.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; 
import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java index 347d8a1..37d4f6d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java index cf08ea9..af5f52c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/TimeLimitedRpcController.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; import java.io.IOException; import java.util.concurrent.atomic.AtomicReference; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java index 
5fe2016..f4b60d8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java @@ -117,7 +117,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.token.Token; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * Helper utility to build protocol buffer requests, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java index 421907d..5160b44 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java @@ -59,8 +59,8 @@ import org.apache.hadoop.hbase.regionserver.RegionOpeningState; import org.apache.hadoop.hbase.security.access.UserPermission; import org.apache.hadoop.util.StringUtils; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; /** * Helper utility to build protocol buffer responses, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java index 64e1a39..9a03a0c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java @@ -29,7 +29,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.security.UserGroupInformation; import 
org.apache.hadoop.security.token.TokenIdentifier; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * Represents the identity information stored in an HBase authentication token. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java index 7527049..bb3107c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.Visibil import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Utility client for doing visibility labels admin operations. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java index 6f4859a..9e4b9c4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MasterAddressTracker.java @@ -31,8 +31,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; -import com.google.protobuf.InvalidProtocolBufferException; - /** * Manages the location of the current active Master for the RegionServer. *

@@ -249,7 +247,7 @@ public class MasterAddressTracker extends ZooKeeperNodeTracker { int prefixLen = ProtobufUtil.lengthOfPBMagic(); try { return ZooKeeperProtos.Master.PARSER.parseFrom(data, prefixLen, data.length - prefixLen); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException(e); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java index 497e8c4..b4c4cdb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.zookeeper; -import com.google.protobuf.InvalidProtocolBufferException; import java.io.EOFException; import java.io.IOException; @@ -493,7 +492,7 @@ public class MetaTableLocator { HBaseProtos.ServerName sn = rl.getServer(); serverName = ServerName.valueOf( sn.getHostName(), sn.getPort(), sn.getStartCode()); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { throw new DeserializationException("Unable to parse meta region location"); } } else { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java index f5b720e..13c6ad0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java @@ -77,7 +77,7 @@ import org.apache.zookeeper.proto.DeleteRequest; import org.apache.zookeeper.proto.SetDataRequest; import org.apache.zookeeper.server.ZooKeeperSaslServer; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * Internal HBase utility class for ZooKeeper. 
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index 1ece448..bc8ff90 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -90,9 +90,9 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.common.base.Stopwatch; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test client behavior w/o setting up a cluster. diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java index 5191880..c45de59 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMetricsConnection.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java index b41c859..f956333 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java +++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java @@ -40,7 +40,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; /** * Test snapshot logic from the client diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java index 968e55c..43daa52 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/exceptions/TestClientExceptionsUtil.java @@ -18,14 +18,13 @@ */ package org.apache.hadoop.hbase.exceptions; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.junit.Test; import java.io.IOException; import static org.junit.Assert.*; -@SuppressWarnings("ThrowableInstanceNeverThrown") public class TestClientExceptionsUtil { @Test diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java index 52db37b..7a35bf6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hbase; import com.google.common.net.HostAndPort; import com.google.common.net.InetAddresses; -import com.google.protobuf.InvalidProtocolBufferException; +import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.List; @@ -388,7 +388,7 @@ import org.apache.hadoop.hbase.util.Bytes; ZooKeeperProtos.Master.PARSER.parseFrom(data, prefixLen, data.length - prefixLen); org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = 
rss.getMaster(); return valueOf(sn.getHostName(), sn.getPort(), sn.getStartCode()); - } catch (InvalidProtocolBufferException e) { + } catch (IOException e) { // A failed parse of the znode is pretty catastrophic. Rather than loop // retrying hoping the bad bytes will changes, and rather than change // the signature on this method to add an IOE which will send ripples all diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java index 0ce0219..179eac7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/exceptions/DeserializationException.java @@ -21,6 +21,9 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; /** * Failed deserialization. + * Usually we'll be passed a protobuf InvalidProtocolBufferException or some such. This exception + * converts protobuf or weird stuff that comes out when trying to parse stuff into a generic + * 'deserialization' exception. 
*/ @InterfaceAudience.Private @SuppressWarnings("serial") @@ -40,4 +43,4 @@ public class DeserializationException extends HBaseException { public DeserializationException(final Throwable t) { super(t); } -} +} \ No newline at end of file diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java index 3d545f6..1d732b9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java @@ -22,9 +22,9 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * A base-class for {@link DataType} implementations backed by protobuf. 
See diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 7b9eb0b..593ea17 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -35,7 +35,7 @@ import java.util.Comparator; import java.util.Iterator; import java.util.List; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java index c9ab23c..b111be1 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java @@ -51,9 +51,9 @@ import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionScanner; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Defines a protocol to delete data in bulk based on a scan. 
The scan can be range scan or with diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java index 4309cdc..188d8d0 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java @@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Sample coprocessor endpoint exposing a Service interface for counting rows and key values. 
diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java index 373e036..51f66bb 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java @@ -6,10 +6,10 @@ package org.apache.hadoop.hbase.coprocessor.example.generated; public final class BulkDeleteProtos { private BulkDeleteProtos() {} public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { } public interface BulkDeleteRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder { // required .hbase.pb.Scan scan = 1; /** @@ -59,14 +59,14 @@ public final class BulkDeleteProtos { * Protobuf type {@code hbase.pb.BulkDeleteRequest} */ public static final class BulkDeleteRequest extends - com.google.protobuf.GeneratedMessage + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage implements BulkDeleteRequestOrBuilder { // Use BulkDeleteRequest.newBuilder() to construct. 
- private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private BulkDeleteRequest(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private BulkDeleteRequest(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BulkDeleteRequest defaultInstance; public static BulkDeleteRequest getDefaultInstance() { @@ -77,20 +77,20 @@ public final class BulkDeleteProtos { return defaultInstance; } - private final com.google.protobuf.UnknownFieldSet unknownFields; + private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override - public final com.google.protobuf.UnknownFieldSet + public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BulkDeleteRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { @@ -142,40 +142,40 @@ public final class BulkDeleteProtos { } } } - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( + throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { + public static org.apache.hbase.shaded.com.google.protobuf.Parser PARSER = + new org.apache.hbase.shaded.com.google.protobuf.AbstractParser() { public BulkDeleteRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BulkDeleteRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public org.apache.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -183,7 +183,7 @@ public final class BulkDeleteProtos { * Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType} */ public enum DeleteType - implements com.google.protobuf.ProtocolMessageEnum { + implements org.apache.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { /** * ROW = 0; */ @@ -232,27 +232,27 @@ public final class BulkDeleteProtos { } } - public static com.google.protobuf.Internal.EnumLiteMap + public static org.apache.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap + private static org.apache.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { + new org.apache.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap() { public DeleteType findValueByNumber(int number) { return DeleteType.valueOf(number); } }; - public final com.google.protobuf.Descriptors.EnumValueDescriptor + public final org.apache.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } - public final com.google.protobuf.Descriptors.EnumDescriptor + public final org.apache.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } - public static final com.google.protobuf.Descriptors.EnumDescriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0); } @@ 
-260,7 +260,7 @@ public final class BulkDeleteProtos { private static final DeleteType[] VALUES = values(); public static DeleteType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); @@ -381,7 +381,7 @@ public final class BulkDeleteProtos { return true; } - public void writeTo(com.google.protobuf.CodedOutputStream output) + public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -406,19 +406,19 @@ public final class BulkDeleteProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(1, scan_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeEnumSize(2, deleteType_.getNumber()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(3, timestamp_); } if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt32Size(4, rowBatchSize_); } size += getUnknownFields().getSerializedSize(); @@ -499,24 +499,24 @@ public final class BulkDeleteProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input) @@ -525,7 +525,7 @@ public final class BulkDeleteProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -535,18 +535,18 @@ public final class BulkDeleteProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.CodedInputStream input) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -560,7 +560,7 @@ public final class BulkDeleteProtos { @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -568,14 +568,14 @@ public final class BulkDeleteProtos { * Protobuf type {@code hbase.pb.BulkDeleteRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder implements 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -588,12 +588,12 @@ public final class BulkDeleteProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getScanFieldBuilder(); } } @@ -622,7 +622,7 @@ public final class BulkDeleteProtos { return create().mergeFrom(buildPartial()); } - public com.google.protobuf.Descriptors.Descriptor + public org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor; } @@ -668,7 +668,7 @@ public final class BulkDeleteProtos { return result; } - public Builder mergeFrom(com.google.protobuf.Message other) { + public Builder mergeFrom(org.apache.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other); } else { @@ -716,13 +716,13 @@ public final class BulkDeleteProtos { } public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) e.getUnfinishedMessage(); throw e; } finally { @@ -736,7 +736,7 @@ public final class BulkDeleteProtos { // required .hbase.pb.Scan scan = 1; private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hbase.shaded.com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; /** * required .hbase.pb.Scan scan = 1; @@ -837,11 +837,11 @@ public final class BulkDeleteProtos { /** * required .hbase.pb.Scan scan = 1; */ - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hbase.shaded.com.google.protobuf.SingleFieldBuilder< 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> getScanFieldBuilder() { if (scanBuilder_ == null) { - scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + scanBuilder_ = new org.apache.hbase.shaded.com.google.protobuf.SingleFieldBuilder< org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( scan_, getParentForChildren(), @@ -965,7 +965,7 @@ public final class BulkDeleteProtos { } public interface BulkDeleteResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder { // required uint64 rowsDeleted = 1; /** @@ -991,14 +991,14 @@ public final class BulkDeleteProtos { * Protobuf type {@code hbase.pb.BulkDeleteResponse} */ public static final class BulkDeleteResponse extends - com.google.protobuf.GeneratedMessage + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage implements BulkDeleteResponseOrBuilder { // Use BulkDeleteResponse.newBuilder() to construct. 
- private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private BulkDeleteResponse(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private BulkDeleteResponse(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final BulkDeleteResponse defaultInstance; public static BulkDeleteResponse getDefaultInstance() { @@ -1009,20 +1009,20 @@ public final class BulkDeleteProtos { return defaultInstance; } - private final com.google.protobuf.UnknownFieldSet unknownFields; + private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override - public final com.google.protobuf.UnknownFieldSet + public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private BulkDeleteResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { @@ -1050,40 +1050,40 @@ public final class BulkDeleteProtos { } } } - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( + throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { + public static org.apache.hbase.shaded.com.google.protobuf.Parser PARSER = + new org.apache.hbase.shaded.com.google.protobuf.AbstractParser() { public BulkDeleteResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new BulkDeleteResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public org.apache.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -1137,7 +1137,7 @@ public final class BulkDeleteProtos { return true; } - public void writeTo(com.google.protobuf.CodedOutputStream output) + public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -1156,11 +1156,11 @@ public final class BulkDeleteProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(1, rowsDeleted_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeUInt64Size(2, versionsDeleted_); } size += getUnknownFields().getSerializedSize(); @@ -1223,24 +1223,24 @@ public final class BulkDeleteProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data, 
+ org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input) @@ -1249,7 +1249,7 @@ public final class BulkDeleteProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -1259,18 +1259,18 @@ public final class BulkDeleteProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.CodedInputStream input) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -1284,7 +1284,7 @@ public final class BulkDeleteProtos { @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1292,14 +1292,14 @@ public final class BulkDeleteProtos { * Protobuf type {@code hbase.pb.BulkDeleteResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1312,12 +1312,12 @@ public final class BulkDeleteProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { @@ -1337,7 +1337,7 @@ public final class BulkDeleteProtos { return create().mergeFrom(buildPartial()); } - public com.google.protobuf.Descriptors.Descriptor + public org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor; } @@ -1371,7 +1371,7 @@ public final class BulkDeleteProtos { return result; } - public Builder mergeFrom(com.google.protobuf.Message other) { + public Builder mergeFrom(org.apache.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other); } else { @@ -1401,13 +1401,13 @@ public final class BulkDeleteProtos { } public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) e.getUnfinishedMessage(); throw e; } finally { @@ -1500,7 +1500,7 @@ public final class BulkDeleteProtos { * Protobuf service {@code hbase.pb.BulkDeleteService} */ public static abstract class BulkDeleteService - implements com.google.protobuf.Service { + implements org.apache.hbase.shaded.com.google.protobuf.Service { protected BulkDeleteService() {} public interface Interface { @@ -1508,39 +1508,39 @@ public final class BulkDeleteProtos { * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse); */ public abstract void delete( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done); } - public static com.google.protobuf.Service newReflectiveService( + public static org.apache.hbase.shaded.com.google.protobuf.Service newReflectiveService( final Interface impl) { return new BulkDeleteService() { @java.lang.Override public void delete( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done) { impl.delete(controller, request, done); } }; } - public static 
com.google.protobuf.BlockingService + public static org.apache.hbase.shaded.com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor + return new org.apache.hbase.shaded.com.google.protobuf.BlockingService() { + public final org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { + public final org.apache.hbase.shaded.com.google.protobuf.Message callBlockingMethod( + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.Message request) + throws org.apache.hbase.shaded.com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + @@ -1554,9 +1554,9 @@ public final class BulkDeleteProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + @@ -1570,9 +1570,9 @@ public final class BulkDeleteProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getResponsePrototype( - 
com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + @@ -1593,26 +1593,26 @@ public final class BulkDeleteProtos { * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse); */ public abstract void delete( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done); public static final - com.google.protobuf.Descriptors.ServiceDescriptor + org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.getDescriptor().getServices().get(0); } - public final com.google.protobuf.Descriptors.ServiceDescriptor + public final org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.Message request, + org.apache.hbase.shaded.com.google.protobuf.RpcCallback< + org.apache.hbase.shaded.com.google.protobuf.Message> done) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method 
descriptor for wrong " + @@ -1621,7 +1621,7 @@ public final class BulkDeleteProtos { switch(method.getIndex()) { case 0: this.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + org.apache.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; default: @@ -1629,9 +1629,9 @@ public final class BulkDeleteProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + @@ -1645,9 +1645,9 @@ public final class BulkDeleteProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + @@ -1662,31 +1662,31 @@ public final class BulkDeleteProtos { } public static Stub newStub( - com.google.protobuf.RpcChannel channel) { + org.apache.hbase.shaded.com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { + private Stub(org.apache.hbase.shaded.com.google.protobuf.RpcChannel channel) { this.channel = channel; } - private final com.google.protobuf.RpcChannel channel; + private final 
org.apache.hbase.shaded.com.google.protobuf.RpcChannel channel; - public com.google.protobuf.RpcChannel getChannel() { + public org.apache.hbase.shaded.com.google.protobuf.RpcChannel getChannel() { return channel; } public void delete( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, request, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( + org.apache.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance())); @@ -1694,28 +1694,28 @@ public final class BulkDeleteProtos { } public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { + org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } public interface BlockingInterface { public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request) - throws com.google.protobuf.ServiceException; + throws org.apache.hbase.shaded.com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + 
private BlockingStub(org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - private final com.google.protobuf.BlockingRpcChannel channel; + private final org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel; public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request) - throws com.google.protobuf.ServiceException { + throws org.apache.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, @@ -1728,22 +1728,22 @@ public final class BulkDeleteProtos { // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService) } - private static com.google.protobuf.Descriptors.Descriptor + private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkDeleteRequest_descriptor; private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_BulkDeleteResponse_descriptor; private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable; - public static com.google.protobuf.Descriptors.FileDescriptor + public static 
org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -1761,29 +1761,29 @@ public final class BulkDeleteProtos { ".coprocessor.example.generatedB\020BulkDele" + "teProtosH\001\210\001\001\240\001\001" }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( + org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_hbase_pb_BulkDeleteRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_BulkDeleteRequest_descriptor, new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", }); internal_static_hbase_pb_BulkDeleteResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_BulkDeleteResponse_descriptor, new 
java.lang.String[] { "RowsDeleted", "VersionsDeleted", }); return null; } }; - com.google.protobuf.Descriptors.FileDescriptor + org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { + new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(), }, assigner); } diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java index b780985..6ca5164 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/ExampleProtos.java @@ -6,23 +6,23 @@ package org.apache.hadoop.hbase.coprocessor.example.generated; public final class ExampleProtos { private ExampleProtos() {} public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { } public interface CountRequestOrBuilder - extends com.google.protobuf.MessageOrBuilder { + extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder { } /** * Protobuf type {@code hbase.pb.CountRequest} */ public static final class CountRequest extends - com.google.protobuf.GeneratedMessage + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage implements CountRequestOrBuilder { // Use CountRequest.newBuilder() to construct. 
- private CountRequest(com.google.protobuf.GeneratedMessage.Builder builder) { + private CountRequest(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private CountRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private CountRequest(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CountRequest defaultInstance; public static CountRequest getDefaultInstance() { @@ -33,19 +33,19 @@ public final class ExampleProtos { return defaultInstance; } - private final com.google.protobuf.UnknownFieldSet unknownFields; + private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override - public final com.google.protobuf.UnknownFieldSet + public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CountRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { initFields(); - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { @@ -63,40 +63,40 @@ public final class ExampleProtos { } } } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { + } catch 
(org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( + throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { + public static org.apache.hbase.shaded.com.google.protobuf.Parser PARSER = + new org.apache.hbase.shaded.com.google.protobuf.AbstractParser() { public CountRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new 
CountRequest(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public org.apache.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -111,7 +111,7 @@ public final class ExampleProtos { return true; } - public void writeTo(com.google.protobuf.CodedOutputStream output) + public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); getUnknownFields().writeTo(output); @@ -165,24 +165,24 @@ public final class ExampleProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom(java.io.InputStream input) @@ -191,7 +191,7 @@ public final class ExampleProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -201,18 +201,18 @@ public final class ExampleProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseDelimitedFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.CodedInputStream input) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -226,7 +226,7 @@ public final class ExampleProtos { @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -234,14 +234,14 @@ public final class ExampleProtos { * Protobuf type {@code hbase.pb.CountRequest} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -254,12 +254,12 @@ public final class ExampleProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { @@ -275,7 +275,7 @@ 
public final class ExampleProtos { return create().mergeFrom(buildPartial()); } - public com.google.protobuf.Descriptors.Descriptor + public org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor; } @@ -298,7 +298,7 @@ public final class ExampleProtos { return result; } - public Builder mergeFrom(com.google.protobuf.Message other) { + public Builder mergeFrom(org.apache.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)other); } else { @@ -318,13 +318,13 @@ public final class ExampleProtos { } public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest) e.getUnfinishedMessage(); throw e; } finally { @@ -347,7 +347,7 @@ public final class ExampleProtos { } public interface CountResponseOrBuilder - extends com.google.protobuf.MessageOrBuilder { + extends org.apache.hbase.shaded.com.google.protobuf.MessageOrBuilder { // required int64 count = 1 [default = 0]; /** @@ -363,14 +363,14 @@ public final class ExampleProtos { * Protobuf 
type {@code hbase.pb.CountResponse} */ public static final class CountResponse extends - com.google.protobuf.GeneratedMessage + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage implements CountResponseOrBuilder { // Use CountResponse.newBuilder() to construct. - private CountResponse(com.google.protobuf.GeneratedMessage.Builder builder) { + private CountResponse(org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private CountResponse(boolean noInit) { this.unknownFields = org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final CountResponse defaultInstance; public static CountResponse getDefaultInstance() { @@ -381,20 +381,20 @@ public final class ExampleProtos { return defaultInstance; } - private final com.google.protobuf.UnknownFieldSet unknownFields; + private final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override - public final com.google.protobuf.UnknownFieldSet + public final org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CountResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder 
unknownFields = + org.apache.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { @@ -417,40 +417,40 @@ public final class ExampleProtos { } } } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( + throw new org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class); } - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { + public static org.apache.hbase.shaded.com.google.protobuf.Parser PARSER = + new org.apache.hbase.shaded.com.google.protobuf.AbstractParser() { public CountResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return new CountResponse(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public org.apache.hbase.shaded.com.google.protobuf.Parser getParserForType() { return PARSER; } @@ -487,7 +487,7 @@ public final class ExampleProtos { return true; } - public void writeTo(com.google.protobuf.CodedOutputStream output) + public void writeTo(org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { @@ -503,7 +503,7 @@ public final class ExampleProtos { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream + size += org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream .computeInt64Size(1, count_); } size += getUnknownFields().getSerializedSize(); @@ -557,24 +557,24 @@ public final class ExampleProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ByteString data, + 
org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input) @@ -583,7 +583,7 @@ public final class ExampleProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -593,18 +593,18 @@ public final class ExampleProtos { } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom( java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public 
static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.CodedInputStream input) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } @@ -618,7 +618,7 @@ public final class ExampleProtos { @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -626,14 +626,14 @@ public final class ExampleProtos { * Protobuf type {@code hbase.pb.CountResponse} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.Builder implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor + public static final org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return 
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -646,12 +646,12 @@ public final class ExampleProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { @@ -669,7 +669,7 @@ public final class ExampleProtos { return create().mergeFrom(buildPartial()); } - public com.google.protobuf.Descriptors.Descriptor + public org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor; } @@ -699,7 +699,7 @@ public final class ExampleProtos { return result; } - public Builder mergeFrom(com.google.protobuf.Message other) { + public Builder mergeFrom(org.apache.hbase.shaded.com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) { return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)other); } else { @@ -726,13 +726,13 @@ public final class ExampleProtos { } public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) + org.apache.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parsedMessage = null; 
try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { + } catch (org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) e.getUnfinishedMessage(); throw e; } finally { @@ -792,7 +792,7 @@ public final class ExampleProtos { * Protobuf service {@code hbase.pb.RowCountService} */ public static abstract class RowCountService - implements com.google.protobuf.Service { + implements org.apache.hbase.shaded.com.google.protobuf.Service { protected RowCountService() {} public interface Interface { @@ -800,55 +800,55 @@ public final class ExampleProtos { * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); */ public abstract void getRowCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done); /** * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); */ public abstract void getKeyValueCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done); } - public static com.google.protobuf.Service newReflectiveService( + public static org.apache.hbase.shaded.com.google.protobuf.Service newReflectiveService( final Interface impl) { return new RowCountService() { @java.lang.Override public void getRowCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController 
controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done) { impl.getRowCount(controller, request, done); } @java.lang.Override public void getKeyValueCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done) { impl.getKeyValueCount(controller, request, done); } }; } - public static com.google.protobuf.BlockingService + public static org.apache.hbase.shaded.com.google.protobuf.BlockingService newReflectiveBlockingService(final BlockingInterface impl) { - return new com.google.protobuf.BlockingService() { - public final com.google.protobuf.Descriptors.ServiceDescriptor + return new org.apache.hbase.shaded.com.google.protobuf.BlockingService() { + public final org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } - public final com.google.protobuf.Message callBlockingMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request) - throws com.google.protobuf.ServiceException { + public final org.apache.hbase.shaded.com.google.protobuf.Message callBlockingMethod( + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.Message request) + throws org.apache.hbase.shaded.com.google.protobuf.ServiceException { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callBlockingMethod() given method descriptor for " + @@ 
-864,9 +864,9 @@ public final class ExampleProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getRequestPrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + @@ -882,9 +882,9 @@ public final class ExampleProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + @@ -907,34 +907,34 @@ public final class ExampleProtos { * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); */ public abstract void getRowCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done); /** * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse); */ public abstract void getKeyValueCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done); + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done); public static final - com.google.protobuf.Descriptors.ServiceDescriptor + 
org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptor() { return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.getDescriptor().getServices().get(0); } - public final com.google.protobuf.Descriptors.ServiceDescriptor + public final org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor getDescriptorForType() { return getDescriptor(); } public final void callMethod( - com.google.protobuf.Descriptors.MethodDescriptor method, - com.google.protobuf.RpcController controller, - com.google.protobuf.Message request, - com.google.protobuf.RpcCallback< - com.google.protobuf.Message> done) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.Message request, + org.apache.hbase.shaded.com.google.protobuf.RpcCallback< + org.apache.hbase.shaded.com.google.protobuf.Message> done) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.callMethod() given method descriptor for wrong " + @@ -943,12 +943,12 @@ public final class ExampleProtos { switch(method.getIndex()) { case 0: this.getRowCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + org.apache.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; case 1: this.getKeyValueCount(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest)request, - com.google.protobuf.RpcUtil.specializeCallback( + org.apache.hbase.shaded.com.google.protobuf.RpcUtil.specializeCallback( done)); return; default: @@ -956,9 +956,9 @@ public final class ExampleProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getRequestPrototype( - 
com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getRequestPrototype() given method " + @@ -974,9 +974,9 @@ public final class ExampleProtos { } } - public final com.google.protobuf.Message + public final org.apache.hbase.shaded.com.google.protobuf.Message getResponsePrototype( - com.google.protobuf.Descriptors.MethodDescriptor method) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor method) { if (method.getService() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "Service.getResponsePrototype() given method " + @@ -993,46 +993,46 @@ public final class ExampleProtos { } public static Stub newStub( - com.google.protobuf.RpcChannel channel) { + org.apache.hbase.shaded.com.google.protobuf.RpcChannel channel) { return new Stub(channel); } public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.RowCountService implements Interface { - private Stub(com.google.protobuf.RpcChannel channel) { + private Stub(org.apache.hbase.shaded.com.google.protobuf.RpcChannel channel) { this.channel = channel; } - private final com.google.protobuf.RpcChannel channel; + private final org.apache.hbase.shaded.com.google.protobuf.RpcChannel channel; - public com.google.protobuf.RpcChannel getChannel() { + public org.apache.hbase.shaded.com.google.protobuf.RpcChannel getChannel() { return channel; } public void getRowCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(0), controller, 
request, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( + org.apache.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); } public void getKeyValueCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request, - com.google.protobuf.RpcCallback done) { + org.apache.hbase.shaded.com.google.protobuf.RpcCallback done) { channel.callMethod( getDescriptor().getMethods().get(1), controller, request, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance(), - com.google.protobuf.RpcUtil.generalizeCallback( + org.apache.hbase.shaded.com.google.protobuf.RpcUtil.generalizeCallback( done, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.getDefaultInstance())); @@ -1040,33 +1040,33 @@ public final class ExampleProtos { } public static BlockingInterface newBlockingStub( - com.google.protobuf.BlockingRpcChannel channel) { + org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) { return new BlockingStub(channel); } public interface BlockingInterface { public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException; + throws 
org.apache.hbase.shaded.com.google.protobuf.ServiceException; public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException; + throws org.apache.hbase.shaded.com.google.protobuf.ServiceException; } private static final class BlockingStub implements BlockingInterface { - private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + private BlockingStub(org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel) { this.channel = channel; } - private final com.google.protobuf.BlockingRpcChannel channel; + private final org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel channel; public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getRowCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException { + throws org.apache.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(0), controller, @@ -1076,9 +1076,9 @@ public final class ExampleProtos { public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getKeyValueCount( - com.google.protobuf.RpcController controller, + org.apache.hbase.shaded.com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request) - throws com.google.protobuf.ServiceException { + throws 
org.apache.hbase.shaded.com.google.protobuf.ServiceException { return (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) channel.callBlockingMethod( getDescriptor().getMethods().get(1), controller, @@ -1091,22 +1091,22 @@ public final class ExampleProtos { // @@protoc_insertion_point(class_scope:hbase.pb.RowCountService) } - private static com.google.protobuf.Descriptors.Descriptor + private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CountRequest_descriptor; private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_CountRequest_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor + private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_CountResponse_descriptor; private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_hbase_pb_CountResponse_fieldAccessorTable; - public static com.google.protobuf.Descriptors.FileDescriptor + public static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } - private static com.google.protobuf.Descriptors.FileDescriptor + private static org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { @@ -1119,29 +1119,29 @@ public final class ExampleProtos { "ache.hadoop.hbase.coprocessor.example.ge" + "neratedB\rExampleProtosH\001\210\001\001\240\001\001" }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - 
com.google.protobuf.Descriptors.FileDescriptor root) { + org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public org.apache.hbase.shaded.com.google.protobuf.ExtensionRegistry assignDescriptors( + org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_hbase_pb_CountRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_hbase_pb_CountRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CountRequest_descriptor, new java.lang.String[] { }); internal_static_hbase_pb_CountResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_hbase_pb_CountResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( + org.apache.hbase.shaded.com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_hbase_pb_CountResponse_descriptor, new java.lang.String[] { "Count", }); return null; } }; - com.google.protobuf.Descriptors.FileDescriptor + org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { + new org.apache.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java index c063aa9..29f2e4d 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/types/PBCell.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.types; -import 
com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; +import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream; import org.apache.hadoop.hbase.protobuf.generated.CellProtos; import org.apache.hadoop.hbase.util.PositionedByteRange; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java index 0a7ac8a..ccdd8b9 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java @@ -56,11 +56,11 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; -import com.google.protobuf.BlockingService; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; -import com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; @Category(IntegrationTests.class) public class IntegrationTestRpcClient { diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java index b401871..8a5c850 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.util.NonceKey; import 
com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * Base Procedure class responsible to handle the Procedure Metadata diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java index 71aae84..c3ffe62 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage; import org.apache.hadoop.hbase.util.ForeignExceptionUtil; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A RemoteProcedureException is an exception from another thread or process. 
diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java index 02b2db9..e404f40 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormat.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.procedure2.store.wal; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import java.io.IOException; import java.io.InputStream; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java index 8678c86..c0d2d75 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALFormatReader.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.procedure2.store.wal; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import java.io.IOException; diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml index b7846ca..04126d4 100644 --- a/hbase-protocol/pom.xml +++ b/hbase-protocol/pom.xml @@ -37,6 +37,54 @@ + + + org.apache.maven.plugins + maven-shade-plugin + 2.4.3 + + + package + + shade + + + + + com.google.protobuf + org.apache.hbase.shaded.com.google.protobuf + + + + + + commons-logging:commons-logging + com.github.stephenc.findbugs:findbugs-annotations + log4j:log4j + org.hamcrest:hamcrest-core + org.mockito:mockito-all + junit:junit + org.apache.hbase:hbase-annotations + + + + + 
+ + + org.apache.maven.plugins maven-site-plugin @@ -106,6 +154,9 @@ + org.apache.hbase @@ -121,6 +172,7 @@ com.google.protobuf protobuf-java + 2.5.0 commons-logging diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index b9e393e..627767c 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -67,10 +67,10 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.util.StringUtils; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * HTable interface to remote tables accessed via REST gateway diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 7512d3e..05531a2 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -77,7 +77,7 @@ import org.apache.hadoop.hbase.util.Base64; import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; import com.sun.jersey.api.json.JSONConfiguration; import com.sun.jersey.api.json.JSONJAXBContext; import com.sun.jersey.api.json.JSONMarshaller; diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java index a7f14f7..7c3350d 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.rsgroup; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java index 7172f06..e23b23c 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java @@ -22,9 +22,9 @@ package org.apache.hadoop.hbase.rsgroup; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; import java.io.IOException; import java.util.HashSet; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java index 01efefc..9a22a81 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java +++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java @@ -25,8 +25,8 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.net.HostAndPort; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java index 5da0df7..cf282ca 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableWrapper.java @@ -18,10 +18,10 @@ */ package org.apache.hadoop.hbase.client; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java index 5d3cbc2..58637f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/RowProcessorClient.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest; import 
org.apache.hadoop.hbase.regionserver.RowProcessor; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * Convenience class that is used to make RowProcessorEndpoint invocations. * For example usage, refer TestRowProcessorEndpoint diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java index cc78626..9c53374 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java @@ -39,11 +39,11 @@ import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRespo import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService; import org.apache.hadoop.hbase.regionserver.InternalScanner; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * A concrete AggregateProtocol implementation. 
Its system level coprocessor diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java index ab5fc78..b413123 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java @@ -34,11 +34,11 @@ import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcesso import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RowProcessor; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * This class demonstrates how to implement atomic read-modify-writes diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java index caf6a14..19f1d27 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import 
org.apache.hadoop.hbase.HBaseInterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java index 1076437..68bcf94 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/EndpointObserver.java @@ -26,8 +26,8 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HBaseInterfaceAudience; -import com.google.protobuf.Message; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Coprocessors implement this interface to observe and mediate endpoint invocations diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java index e771a92..3750a8a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java @@ -41,9 +41,9 @@ import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateR import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse; import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * This class demonstrates how to implement atomic 
multi row transactions using diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java index 88db6b6..98cd444 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HBaseInterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java index 2224414..3429784 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExc import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage; import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A ForeignException is an exception from another thread or process. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java index 51c48ce..25aa59f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java @@ -30,7 +30,7 @@ import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStag import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.protobuf.CodedOutputStream; +import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream; import io.netty.bootstrap.Bootstrap; import io.netty.buffer.ByteBuf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index 0546253..0ae14af 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -25,8 +25,8 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.protobuf.ByteString; -import com.google.protobuf.CodedOutputStream; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufOutputStream; @@ -93,6 +93,30 @@ import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; 
+import com.google.common.base.Charsets; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufOutputStream; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.Channel; +import io.netty.channel.ChannelDuplexHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelOutboundHandlerAdapter; +import io.netty.channel.ChannelPipeline; +import io.netty.channel.ChannelPromise; +import io.netty.channel.SimpleChannelInboundHandler; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import io.netty.handler.codec.MessageToByteEncoder; +import io.netty.handler.codec.protobuf.ProtobufDecoder; +import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; +import io.netty.handler.timeout.IdleStateEvent; +import io.netty.handler.timeout.IdleStateHandler; +import io.netty.util.concurrent.Promise; + /** * Helper class for adding sasl support for {@link FanOutOneBlockAsyncDFSOutput}. */ @@ -754,7 +778,10 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper { .newBuilder(); builder.setStatus(DataTransferEncryptorStatus.SUCCESS); if (payload != null) { - builder.setPayload(ByteString.copyFrom(payload)); + // NOTE!!! Explicit reference to HDFS's transitively included protobuf! HBase has + // shaded and relocated the protobuf it uses to be at + // org.apache.hbase.shaded.com.google.protobuf. 
+ builder.setPayload(com.google.protobuf.ByteString.copyFrom(payload)); } if (options != null) { CIPHER_OPTION_HELPER.addCipherOptions(builder, options); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java index e91699a..bb8a646 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CallRunner.java @@ -34,7 +34,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * The request processing logic, which is usually executed in thread pools provided by an diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java index f56bf6f..42a12e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/PriorityFunction.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HBaseInterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java index 68e7b65..c1c116b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RWQueueRpcExecutor.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest; import 
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; import org.apache.hadoop.hbase.util.ReflectionUtils; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * RPC Executor that uses different queues for reads and writes. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 73226aa..11b2df0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -135,13 +135,13 @@ import org.apache.htrace.TraceInfo; import org.codehaus.jackson.map.ObjectMapper; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import com.google.protobuf.BlockingService; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.CodedOutputStream; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hbase.shaded.com.google.protobuf.CodedOutputStream; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; /** * An RPC server that hosts protobuf described Services. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java index dd7e584..4c13209 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java @@ -31,10 +31,10 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.authorize.PolicyProvider; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.BlockingService; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC, HBaseInterfaceAudience.PHOENIX}) @InterfaceStability.Evolving diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index 37e4e44..dcf23a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -18,7 +18,6 @@ */ package org.apache.hadoop.hbase.mapreduce; -import com.google.protobuf.InvalidProtocolBufferException; import com.codahale.metrics.MetricRegistry; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -576,12 +575,7 @@ public class TableMapReduceUtil { public static Scan convertStringToScan(String base64) throws IOException { byte [] decoded = Base64.decode(base64); ClientProtos.Scan scan; - try { 
- scan = ClientProtos.Scan.parseFrom(decoded); - } catch (InvalidProtocolBufferException ipbe) { - throw new IOException(ipbe); - } - + scan = ClientProtos.Scan.parseFrom(decoded); return ProtobufUtil.toScan(scan); } @@ -798,7 +792,7 @@ public class TableMapReduceUtil { // pull necessary dependencies org.apache.zookeeper.ZooKeeper.class, io.netty.channel.Channel.class, - com.google.protobuf.Message.class, + org.apache.hbase.shaded.com.google.protobuf.Message.class, com.google.common.collect.Lists.class, org.apache.htrace.Trace.class, com.codahale.metrics.MetricRegistry.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java index a21edcc..eda50ef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ExpiredMobFileCleanerChore.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.mob.ExpiredMobFileCleaner; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.mob.MobUtils; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * The Class ExpiredMobFileCleanerChore for running cleaner regularly to remove the expired diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 9196368..7eb3ebc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.master; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Service; 
+import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Service; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java index 1e6dade..a3bdd81 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterAnnotationReadingPriorityFunction.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.master; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index 986ff6e..7fc37e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -18,13 +18,13 @@ */ package org.apache.hadoop.hbase.master; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import 
org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.net.InetAddress; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java index 9b91572..964fb7a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.procedure.MasterProcedureManagerHost; import org.apache.hadoop.hbase.procedure2.ProcedureExecutor; import org.apache.hadoop.hbase.quotas.MasterQuotaManager; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Services Master supplies @@ -272,7 +272,7 @@ public interface MasterServices extends Server { * *

* Only a single instance may be registered for a given {@link Service} subclass (the - * instances are keyed on {@link com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. + * instances are keyed on {@link org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. * After the first registration, subsequent calls with the same service name will fail with * a return value of {@code false}. *

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java index ffdbd17..2279fa4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java @@ -77,8 +77,8 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.zookeeper.KeeperException; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * The ServerManager class manages info about region servers. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java index c884806..70e5faf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredNodeAssignmentHelper.java @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * Helper class for {@link FavoredNodeLoadBalancer} that has all the intelligence diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java index fabd41a..4661211 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/normalizer/SimpleRegionNormalizer.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.master.normalizer; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java index 3c965cb..ae4c4ee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/ExpiredMobFileCleaner.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * The cleaner to delete the expired MOB files. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java index 8547c8c..0b32a5f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/mapreduce/Sweeper.java @@ -38,7 +38,7 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * The sweep tool. 
It deletes the mob files that are not used and merges the small mob files to diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java index 5961645..a91cdf5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.monitoring; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * A MonitoredTask implementation optimized for use with RPC Handlers diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java index a29595b..f0c0a1ed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Operation; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * A MonitoredTask implementation designed for use with RPC Handlers diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java index 085d642..fb05e79 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java @@ -31,7 +31,7 @@ 
import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java index 2e03a60..f480ea3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * ZooKeeper based controller for a procedure member. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java index 8cb2237..f7afde8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.wal.WALKey; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @InterfaceAudience.Private public class ReplicationProtbufUtil { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java index 1eacc75..777519a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java @@ -41,8 +41,8 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Message; -import com.google.protobuf.TextFormat; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; import org.apache.hadoop.hbase.security.User; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java index be2bd91..5a7482c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java 
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * Base class for RowProcessor with some default implementations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 9f6a03a..b25f3a8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -24,14 +24,14 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; import com.google.common.io.Closeables; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.TextFormat; import java.io.EOFException; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index e03993f..2d8bb34 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.regionserver; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.Descriptors; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 9cfc5df..750d15f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -192,11 +192,11 @@ import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; import org.apache.zookeeper.KeeperException; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; -import com.google.protobuf.Message; -import 
com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; /** * Implements the regionserver RPC services. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java index 9b1f82a..e2c4797 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.regionserver; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; import java.io.IOException; import java.util.Collection; import java.util.List; @@ -551,7 +551,7 @@ public interface Region extends ConfigurationObserver { /** * Executes a single protocol buffer coprocessor endpoint {@link Service} method using * the registered protocol handlers. {@link Service} implementations must be registered via the - * {@link Region#registerService(com.google.protobuf.Service)} + * {@link Region#registerService(org.apache.hbase.shaded.com.google.protobuf.Service)} * method before they are available. 
* * @param controller an {@code RpcContoller} implementation to pass to the invoked service @@ -560,19 +560,19 @@ public interface Region extends ConfigurationObserver { * @return a protocol buffer {@code Message} instance containing the method's result * @throws IOException if no registered service handler is found or an error * occurs during the invocation - * @see org.apache.hadoop.hbase.regionserver.Region#registerService(com.google.protobuf.Service) + * @see org.apache.hadoop.hbase.regionserver.Region#registerService(org.apache.hbase.shaded.com.google.protobuf.Service) */ Message execService(RpcController controller, CoprocessorServiceCall call) throws IOException; /** * Registers a new protocol buffer {@link Service} subclass as a coprocessor endpoint to * be available for handling - * {@link Region#execService(com.google.protobuf.RpcController, + * {@link Region#execService(org.apache.hbase.shaded.com.google.protobuf.RpcController, * org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)}} calls. * *

* Only a single instance may be registered per region for a given {@link Service} subclass (the - * instances are keyed on {@link com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. + * instances are keyed on {@link org.apache.hbase.shaded.com.google.protobuf.Descriptors.ServiceDescriptor#getFullName()}. * After the first registration, subsequent calls with the same service name will fail with * a return value of {@code false}. *

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index f6ccaa1..b558b29 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -31,8 +31,8 @@ import java.util.regex.Matcher; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.protobuf.Message; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; import org.apache.commons.collections.map.AbstractReferenceMap; import org.apache.commons.collections.map.ReferenceMap; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java index 356a88b..5cc5115 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.wal.WAL; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Services provided by {@link HRegionServer} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java index 34901b7..428a285 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; /** * Defines the procedure to atomically perform multiple scans and mutations diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java index 537329a..a1d0675 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * This manager class handles the work dealing with snapshots for a {@link HRegionServer}. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index 0755358..5d51873 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -44,8 +44,8 @@ import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALTrailer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * A Protobuf based WAL has the following structure: diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java index 87850aa..3899b2c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.io.IOUtils; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java index 3eb85bd..12595e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEditsReplaySink.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java index 197144d..9100afc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALUtil.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALKey; -import com.google.protobuf.TextFormat; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; /** * Helper methods to ease Region Server integration with the Write Ahead Log (WAL). 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java index b0fd176..10b9af5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java @@ -79,7 +79,7 @@ import org.apache.hadoop.util.StringUtils; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.Lists; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * A {@link org.apache.hadoop.hbase.replication.ReplicationEndpoint} endpoint diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index 7d5fc32..09280ba 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -128,10 +128,10 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.google.protobuf.Message; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Provides basic authorization checks for data access and administrative diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java index d0d9b63..0ff5212 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/HbaseObjectWritableFor96Migration.java @@ -99,8 +99,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableUtils; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; /** *

This is a customized version of the polymorphic hadoop diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java index cb143b7..2d4af19 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java @@ -40,9 +40,9 @@ import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBul import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse; import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService; import org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Coprocessor service for bulk loads in secure mode. 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java index a7e6113..25a5d67 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.security.token; import java.io.IOException; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index 68817bc..b697344 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.security.PrivilegedExceptionAction; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 3fa3fa3..50f18f1 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -109,10 +109,10 @@ import org.apache.hadoop.hbase.util.Pair; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; -import com.google.protobuf.ByteString; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Coprocessor that has both the MasterObserver and RegionObserver implemented that supports in diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java index 1763b2f..6b391ce 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.snapshot; -import com.google.protobuf.CodedInputStream; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.CodedInputStream; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java index 3bb3575..93a4c48 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.snapshot; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import java.io.IOException; import java.io.InterruptedIOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index 6e8bcc8..90cf99d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -25,7 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Ordering; import com.google.common.collect.TreeMultimap; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.Closeable; import java.io.FileNotFoundException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java index 3c2203b..549d45a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. * - * The portion of this file denoted by 'Copied from com.google.protobuf.CodedInputStream' + * The portion of this file denoted by 'Copied from org.apache.hbase.shaded.com.google.protobuf.CodedInputStream' * is from Protocol Buffers v2.4.1 under the following license * * Copyright 2008 Google Inc. All rights reserved. @@ -66,7 +66,7 @@ public abstract class ProtoUtil { * @throws IOException if it is malformed or EOF. 
*/ public static int readRawVarint32(DataInput in) throws IOException { - // Copied from com.google.protobuf.CodedInputStream v2.4.1 readRawVarint32 + // Copied from org.apache.hbase.shaded.com.google.protobuf.CodedInputStream v2.4.1 readRawVarint32 byte tmp = in.readByte(); if (tmp >= 0) { return tmp; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java index 86fdfbd..ec9b11d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import com.google.common.annotations.VisibleForTesting; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * A Key for an entry in the WAL. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java index 3e27834..208f876 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.wal; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import com.google.protobuf.ServiceException; -import com.google.protobuf.TextFormat; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.TextFormat; import java.io.EOFException; import java.io.FileNotFoundException; diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp index cc13972..e9fd862 100644 --- 
a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp @@ -19,7 +19,7 @@ --%> <%@ page contentType="text/html;charset=UTF-8" import="static org.apache.commons.lang.StringEscapeUtils.escapeXml" - import="com.google.protobuf.ByteString" + import="org.apache.hbase.shaded.com.google.protobuf.ByteString" import="java.util.ArrayList" import="java.util.TreeMap" import="java.util.List" diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java index 6f225d6..90d881a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MockRegionServerServices.java @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Basic mock region server services. 
Should only be instantiated by HBaseTestingUtility.b diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java index 6db201f..0cf0047 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/QosTestHelper.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.protobuf.generated.RPCProtos; import org.apache.hadoop.hbase.regionserver.AnnotationReadingPriorityFunction; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java index affa9b3..e235f04 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java @@ -48,8 +48,8 @@ import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test MetaTableAccessor but without spinning up a cluster. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java index ba6e1d4..a6b6883 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableLocator.java @@ -54,8 +54,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test {@link org.apache.hadoop.hbase.zookeeper.MetaTableLocator} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java index e6c17a5..ff9270f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestServerLoad.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({MiscTests.class, SmallTests.class}) public class TestServerLoad { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java index b59a583..5de4af1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin1.java @@ -70,7 +70,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import 
org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Class to test HBaseAdmin. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java index d088fc4..9340e7b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAdmin2.java @@ -67,7 +67,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java index 515e763..9835e5d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientScannerRPCTimeout.java @@ -45,8 +45,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test the scenario where a HRegionServer#scan() call, while scanning, timeout at client side and diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java index 679d9c9..9c0e4c9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestClientTimeouts.java @@ -48,11 +48,11 @@ import org.junit.BeforeClass; 
import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({MediumTests.class, ClientTests.class}) public class TestClientTimeouts { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java index 3948d18..f30eb65 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java @@ -57,8 +57,8 @@ import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({SmallTests.class, ClientTests.class}) public class TestHBaseAdminNoCluster { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java index a2ce0d2..855a9bd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMetaCache.java @@ -17,8 +17,8 @@ */ package 
org.apache.hadoop.hbase.client; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java index 4e4ff5e..9e6f718 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java @@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.protobuf.ResponseConverter; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java index 49733d4..68738fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointNullResponse.java @@ -37,9 +37,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import 
com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Test coprocessor endpoint that always returns {@code null} for requests to the last region diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java index aa8ef62..b3e0894 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpointWithErrors.java @@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; /** * Test coprocessor endpoint that always throws a {@link DoNotRetryIOException} for requests on diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java index cdda28a..1a09d80 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/ProtobufCoprocessorService.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.protobuf.RpcCallback; -import 
com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java index b743254..9c341bb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java @@ -54,7 +54,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * TestEndpoint: test cases to verify the batch execution of coprocessor Endpoint diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java index 1768a2a..987d871 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java @@ -60,8 +60,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * TestEndpoint: test 
cases to verify coprocessor Endpoint diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java index 7695361..a56993d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java @@ -42,7 +42,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({CoprocessorTests.class, MediumTests.class}) public class TestCoprocessorTableEndpoint { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java index 1484c34..89b8127 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java @@ -40,9 +40,9 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java index 7cae0bc..361c1e3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java @@ -75,7 +75,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java index f893555..40abb49 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * Test that we correctly serialize exceptions from a remote source diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java index 45cec78..5586729 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java @@ -28,13 +28,13 @@ import static org.mockito.internal.verification.VerificationModeFactory.times; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.Lists; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.BlockingService; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.RpcChannel; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.io.IOException; import java.net.ConnectException; import java.net.InetAddress; @@ -435,7 +435,7 @@ public abstract class AbstractTestIPC { channel .callMethod(md, new PayloadCarryingRpcController(), param, md.getOutputType().toProto(), - new com.google.protobuf.RpcCallback() { + new org.apache.hbase.shaded.com.google.protobuf.RpcCallback() { @Override public void run(Message parameter) { done.set(true); @@ -472,7 +472,8 @@ public abstract class AbstractTestIPC { final AtomicBoolean done = new AtomicBoolean(false); PayloadCarryingRpcController controller = new PayloadCarryingRpcController(); - controller.notifyOnFail(new com.google.protobuf.RpcCallback() { + controller.notifyOnFail( + new org.apache.hbase.shaded.com.google.protobuf.RpcCallback() { @Override public void run(IOException e) { done.set(true); @@ -482,7 +483,7 @@ public abstract class AbstractTestIPC { }); channel.callMethod(md, controller, param, md.getOutputType().toProto(), - new com.google.protobuf.RpcCallback() { + new org.apache.hbase.shaded.com.google.protobuf.RpcCallback() { @Override public void run(Message parameter) { done.set(true); @@ -529,7 +530,8 @@ public
abstract class AbstractTestIPC { } } - private Message setupAsyncConnection(TestRpcServer rpcServer, RpcClient client) + private org.apache.hbase.shaded.com.google.protobuf.Message + setupAsyncConnection(TestRpcServer rpcServer, RpcClient client) throws IOException, InterruptedException, ExecutionException, java.util.concurrent.TimeoutException { InetSocketAddress address = rpcServer.getListenerAddress(); @@ -545,8 +547,10 @@ public abstract class AbstractTestIPC { AsyncRpcChannel channel = client.createRpcChannel(md.getService().getName(), serverName, User.getCurrent()); - final Future f = channel - .callMethod(md, param, null, md.getOutputType().toProto(), MessageConverter.NO_CONVERTER, + org.apache.hbase.shaded.com.google.protobuf.Message msg = + (org.apache.hbase.shaded.com.google.protobuf.Message)md.getOutputType().toProto(); + final Future f = channel + .callMethod(md, param, null, msg, MessageConverter.NO_CONVERTER, null, 1000, HConstants.NORMAL_QOS); return f.get(1, TimeUnit.SECONDS); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java index 7efe198..8885769 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestAsyncIPC.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOutboundHandlerAdapter; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java index 
82b0341..75ac1f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.ipc; import static org.junit.Assert.assertEquals; -import com.google.protobuf.Descriptors; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors; import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; import org.apache.hadoop.hbase.testclassification.SmallTests; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java index 56de07d..44ec51a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestIPC.java @@ -22,8 +22,8 @@ import static org.mockito.Matchers.anyInt; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; -import com.google.protobuf.ByteString; -import com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; import java.io.IOException; import java.net.InetSocketAddress; import java.net.Socket; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java index 81869b4..8820ec7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java @@ -42,10 +42,10 @@ import org.junit.Before; import org.junit.After; import org.junit.experimental.categories.Category; -import com.google.protobuf.BlockingRpcChannel; -import 
com.google.protobuf.BlockingService; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test for testing protocol buffer based RPC mechanism. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java index a37ba11..256ee8e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java @@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.ipc; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.protobuf.BlockingService; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java index 0190027..e6a1dc7 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java @@ -74,7 +74,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; -import com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Message; @Category({RPCTests.class, SmallTests.class}) public class TestSimpleRpcScheduler { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java index 66d7eb1..6ddbbe5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java @@ -73,8 +73,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.common.collect.Multimap; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test cases for the atomic load error handling of the bulk load functionality. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java index c7a42d9..e8e350f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockNoopMasterServices.java @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.mockito.Mockito; -import com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.Service; public class MockNoopMasterServices implements MasterServices, Server { private final Configuration conf; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java index 2927023..565974c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/MockRegionServer.java @@ -116,9 +116,9 @@ import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * A mock RegionServer implementation. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java index 35a3a79..bccf875 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java @@ -83,8 +83,9 @@ import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({MasterTests.class, SmallTests.class}) public class TestCatalogJanitor { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java index 37d6940..733ac29 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java @@ -48,8 +48,8 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({ MasterTests.class, MediumTests.class }) public class TestHMasterRPCException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java index 6da7a38..3aa90fb 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterNoCluster.java @@ -69,7 +69,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestRule; import org.mockito.Mockito; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Standup the master and fake it to test various aspects of master function. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java index 1cc9eda..175fba9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java @@ -69,7 +69,7 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.common.collect.Lists; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Test the master-related aspects of a snapshot diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java index 9d171f0..3e91b09 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java @@ -38,8 +38,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import 
org.apache.hbase.shaded.com.google.protobuf.ServiceException; import java.util.ArrayList; import java.util.HashMap; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java index b2d8b38..a3b5b23 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestProtobufUtil.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * Class to test ProtobufUtil. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java index 35258f2..b43d2de 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.RequestConverter; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * A region server that will OOME. 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java index 770c39b..5d01dce 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java @@ -45,10 +45,10 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; -import com.google.protobuf.Descriptors.MethodDescriptor; -import com.google.protobuf.Message; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; +import org.apache.hbase.shaded.com.google.protobuf.Message; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * An implementation of {@link Table} that sits directly on a Region; it decorates the passed in diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java index 1c1a603..214f62e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java @@ -71,7 +71,7 @@ import org.junit.experimental.categories.Category; import com.google.common.collect.Iterators; import com.google.common.collect.Sets; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category(LargeTests.class) public class TestEndToEndSplitTransaction { diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 9aa3a9b..5e90874 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.regionserver; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.protobuf.ByteString; import java.io.IOException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; import java.io.InterruptedIOException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java index 5fde726..51d4eca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java @@ -46,7 +46,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({RegionServerTests.class, SmallTests.class}) public class TestHRegionInfo { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index a3804dd..f982ed2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -36,7 +36,7 @@ import static 
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.common.collect.Lists; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java index 0e60877..a74d9ba 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPriorityRpc.java @@ -47,7 +47,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * Tests that verify certain RPCs get a higher QoS. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java index a9115f3..c95f29d2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java @@ -43,8 +43,8 @@ import org.junit.experimental.categories.Category; import org.mockito.Mockito; import com.google.common.collect.Lists; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java index f88c7dd..876e44b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java @@ -84,8 +84,8 @@ import org.junit.rules.TestName; import org.junit.rules.TestRule; import com.google.common.base.Joiner; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Like {@link TestRegionMergeTransaction} in that we're testing diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java index 99f5801..460f1c2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionReplicas.java @@ -59,7 +59,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Tests for region replicas. 
Sad that we cannot isolate these without bringing up a whole diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java index 0ee75a8..336f91e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerNoMaster.java @@ -51,7 +51,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Tests on the region server, without the master. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java index 54bee94..c51c913 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestScannerHeartbeatMessages.java @@ -65,8 +65,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Here we test to make sure that scans return the expected Results when the server is sending the diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java index 470c6d1..85ef043 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java @@ -63,10 +63,10 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({RegionServerTests.class, MediumTests.class}) public class TestServerCustomProtocol { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java index 7fbcfea..03c8808 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java @@ -107,8 +107,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Like TestSplitTransaction in that we're testing {@link SplitTransactionImpl} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java index 02040e9..878d537 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java @@ -59,8 +59,6 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles; import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.HRegionServer; -import org.apache.hadoop.hbase.regionserver.RSRpcServices; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.replication.regionserver.TestSourceFSConfigurationProvider; @@ -75,8 +73,6 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; - @Category({ReplicationTests.class, LargeTests.class}) public class TestMasterReplication { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java index 385b7b0..92efc75 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/AbstractTestSecureIPC.java @@ -34,8 +34,8 @@ import java.util.List; import java.util.Properties; import java.util.concurrent.ThreadLocalRandom; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -65,8 +65,8 @@ import 
org.junit.rules.ExpectedException; import org.mockito.Mockito; import com.google.common.collect.Lists; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; import javax.security.sasl.SaslException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index 274fe37..407597b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -66,8 +66,8 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Utility methods for testing security diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index f58e24e..1d11696 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -126,11 +126,11 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.RpcCallback; -import com.google.protobuf.RpcController; -import com.google.protobuf.Service; -import 
com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.RpcCallback; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.Service; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Performs authorization checks for common operations, according to different diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java index d5834fd..4625613 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java @@ -55,7 +55,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import com.google.common.collect.ListMultimap; -import com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; @Category({SecurityTests.class, MediumTests.class}) public class TestNamespaceCommands extends SecureTestUtil { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java index 1d7c676..e4197f4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestGenerateDelegationToken.java @@ -47,7 +47,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({ 
SecurityTests.class, MediumTests.class }) public class TestGenerateDelegationToken extends SecureTestCluster { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index 9382bd4..b66e4e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -85,10 +85,10 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.BlockingRpcChannel; -import com.google.protobuf.BlockingService; -import com.google.protobuf.RpcController; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.BlockingRpcChannel; +import org.apache.hbase.shaded.com.google.protobuf.BlockingService; +import org.apache.hbase.shaded.com.google.protobuf.RpcController; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Tests for authentication token creation and usage diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java index ab2bacc..ab08c0b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java @@ -72,7 +72,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; /** * Base test class for visibility labels basic features diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java index 06fc7be..28b3378 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsOpWithDifferentUsersNoACL.java @@ -43,7 +43,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({SecurityTests.class, MediumTests.class}) public class TestVisibilityLabelsOpWithDifferentUsersNoACL { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java index f67296d..9de20c7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithACL.java @@ -56,7 +56,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({SecurityTests.class, MediumTests.class}) public class TestVisibilityLabelsWithACL { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java index 63c08a2..9ced86a 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabelsWithDefaultVisLabelService.java @@ -55,7 +55,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({SecurityTests.class, MediumTests.class}) public class TestVisibilityLabelsWithDefaultVisLabelService extends TestVisibilityLabels { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java index 1410c78..66e40da 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLablesWithGroups.java @@ -54,7 +54,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({SecurityTests.class, MediumTests.class}) public class TestVisibilityLablesWithGroups { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java index 1d027d0..56f08fb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestWithDisabledAuthorization.java @@ -48,7 +48,7 @@ import org.junit.Test; import 
org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; @Category({SecurityTests.class, LargeTests.class}) public class TestWithDisabledAuthorization { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index 38afc3b..d98a2e6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -75,7 +75,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.MD5Hash; import org.junit.Assert; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; /** * Utilities class for snapshots diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java index 835f92e..7476586 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotManifest.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.snapshot; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java index bde3e49..5f2eece 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java @@ -46,7 +46,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; -import com.google.protobuf.ServiceException; +import org.apache.hbase.shaded.com.google.protobuf.ServiceException; @Category({RegionServerTests.class, MediumTests.class}) public class TestWALFiltering { diff --git a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java index fa66d69..481ae9f 100644 --- a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java +++ b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/SparkSQLPushDownFilter.java @@ -17,8 +17,8 @@ package org.apache.hadoop.hbase.spark; -import com.google.protobuf.ByteString; -import com.google.protobuf.InvalidProtocolBufferException; +import org.apache.hbase.shaded.com.google.protobuf.ByteString; +import org.apache.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Cell; -- 2.6.1