From 151ac5593405a2784302df8ab7735b2a5b11df35 Mon Sep 17 00:00:00 2001 From: Michael Stack Date: Tue, 27 Jun 2017 14:30:10 -0700 Subject: [PATCH] HBASE-17908 Upgrade guava Pull in guava 22.0 by using the shaded version up in new hbase-thirdparty project. In poms, exclude guava everywhere except on hadoop-common. Do this so we minimize transitive includes. hadoop-common is needed because hadoop Configuration uses guava doing preconditions. Everywhere we used guava, instead use shaded so fix a load of imports. Stopwatch API changed as did hashing and toStringHelper which is now in MoreObjects class. Otherwise, minimal changes to come up on 22.0 --- hbase-assembly/pom.xml | 6 + hbase-client/pom.xml | 15 +- .../org/apache/hadoop/hbase/MetaTableAccessor.java | 2 +- .../hadoop/hbase/client/AsyncConnectionImpl.java | 2 +- .../hadoop/hbase/client/AsyncHBaseAdmin.java | 2 +- .../apache/hadoop/hbase/client/AsyncProcess.java | 2 +- .../hbase/client/AsyncRequestFutureImpl.java | 2 +- .../client/AsyncRpcRetryingCallerFactory.java | 6 +- .../AsyncScanSingleRegionRpcRetryingCaller.java | 2 +- .../apache/hadoop/hbase/client/AsyncTableBase.java | 2 +- .../hbase/client/AsyncTableResultScanner.java | 4 +- .../hadoop/hbase/client/BufferedMutatorImpl.java | 2 +- .../hbase/client/ClientAsyncPrefetchScanner.java | 2 +- .../apache/hadoop/hbase/client/ClientScanner.java | 2 +- .../client/ColumnFamilyDescriptorBuilder.java | 2 +- .../hbase/client/ConnectionConfiguration.java | 2 +- .../hbase/client/ConnectionImplementation.java | 2 +- .../hadoop/hbase/client/ConnectionUtils.java | 4 +- .../org/apache/hadoop/hbase/client/HBaseAdmin.java | 2 +- .../apache/hadoop/hbase/client/HRegionLocator.java | 2 +- .../org/apache/hadoop/hbase/client/HTable.java | 4 +- .../hadoop/hbase/client/HTableMultiplexer.java | 4 +- .../hadoop/hbase/client/MetricsConnection.java | 2 +- .../hadoop/hbase/client/MultiServerCallable.java | 2 +- .../org/apache/hadoop/hbase/client/Mutation.java | 10 +- .../client/PreemptiveFastFailInterceptor.java | 2 +- .../java/org/apache/hadoop/hbase/client/Query.java | 6 +- .../hadoop/hbase/client/RawAsyncTableImpl.java | 2 +- .../hbase/client/RegionCoprocessorRpcChannel.java | 2 +- .../hbase/client/RegionCoprocessorServiceExec.java | 2 +- .../org/apache/hadoop/hbase/client/RowAccess.java | 2 +- .../hbase/client/ScannerCallableWithReplicas.java | 2 +- .../hbase/client/ServerStatisticTracker.java | 2 +- .../hbase/client/SimpleRequestController.java | 2 +- .../backoff/ExponentialClientBackoffPolicy.java | 2 +- .../client/metrics/ServerSideScanMetrics.java | 2 +- .../hbase/client/replication/ReplicationAdmin.java | 4 +- .../client/replication/ReplicationSerDeHelper.java | 2 +- .../hadoop/hbase/filter/ColumnCountGetFilter.java | 2 +- .../hbase/filter/ColumnPaginationFilter.java | 2 +- .../hadoop/hbase/filter/ColumnPrefixFilter.java | 2 +- .../hadoop/hbase/filter/ColumnRangeFilter.java | 2 +- .../apache/hadoop/hbase/filter/CompareFilter.java | 2 +- .../hadoop/hbase/filter/DependentColumnFilter.java | 2 +- .../hadoop/hbase/filter/FirstKeyOnlyFilter.java | 2 +- .../apache/hadoop/hbase/filter/FuzzyRowFilter.java | 2 +- .../hadoop/hbase/filter/InclusiveStopFilter.java | 2 +- .../apache/hadoop/hbase/filter/KeyOnlyFilter.java | 2 +- .../org/apache/hadoop/hbase/filter/PageFilter.java | 2 +- .../apache/hadoop/hbase/filter/PrefixFilter.java | 2 +- .../hbase/filter/SingleColumnValueFilter.java | 2 +- .../hadoop/hbase/filter/TimestampsFilter.java | 2 +- .../apache/hadoop/hbase/ipc/AbstractRpcClient.java | 10 +- .../apache/hadoop/hbase/ipc/BlockingRpcClient.java | 2 +- .../hadoop/hbase/ipc/CoprocessorRpcUtils.java | 2 +- .../java/org/apache/hadoop/hbase/ipc/IPCUtil.java | 2 +- .../hbase/ipc/NettyRpcClientConfigHelper.java | 2 +- .../apache/hadoop/hbase/ipc/RpcClientFactory.java | 4 +- .../replication/ReplicationQueuesClientZKImpl.java | 4 +- .../hbase/replication/ReplicationStateZKBase.java | 2 +- .../hbase/replication/ReplicationTableBase.java | 2 +- .../hbase/security/AbstractHBaseSaslRpcClient.java | 2 +- .../hbase/security/access/AccessControlUtil.java | 8 +- .../hadoop/hbase/security/access/Permission.java | 2 +- .../security/access/ShadedAccessControlUtil.java | 4 +- .../hadoop/hbase/zookeeper/MetricsZooKeeper.java | 2 +- .../apache/hadoop/hbase/zookeeper/ZNodePaths.java | 2 +- .../hbase/TestInterfaceAudienceAnnotations.java | 10 +- .../hadoop/hbase/client/TestClientNoCluster.java | 12 +- .../hbase/security/TestHBaseSaslRpcClient.java | 2 +- hbase-common/pom.xml | 5 +- .../apache/hadoop/hbase/ByteBufferKeyValue.java | 2 +- .../org/apache/hadoop/hbase/CellComparator.java | 2 +- .../java/org/apache/hadoop/hbase/ChoreService.java | 2 +- .../java/org/apache/hadoop/hbase/KeyValue.java | 2 +- .../org/apache/hadoop/hbase/KeyValueTestUtil.java | 2 +- .../java/org/apache/hadoop/hbase/KeyValueUtil.java | 4 +- .../org/apache/hadoop/hbase/ScheduledChore.java | 2 +- .../java/org/apache/hadoop/hbase/ServerName.java | 2 +- .../java/org/apache/hadoop/hbase/TableName.java | 6 +- .../org/apache/hadoop/hbase/io/ByteBufferPool.java | 2 +- .../apache/hadoop/hbase/io/LimitInputStream.java | 4 +- .../org/apache/hadoop/hbase/io/crypto/Context.java | 2 +- .../org/apache/hadoop/hbase/io/crypto/aes/AES.java | 4 +- .../hadoop/hbase/io/crypto/aes/AESDecryptor.java | 2 +- .../hadoop/hbase/io/crypto/aes/AESEncryptor.java | 2 +- .../hbase/io/crypto/aes/CommonsCryptoAES.java | 4 +- .../io/crypto/aes/CommonsCryptoAESDecryptor.java | 2 +- .../io/crypto/aes/CommonsCryptoAESEncryptor.java | 2 +- .../hadoop/hbase/io/encoding/EncodedDataBlock.java | 4 +- .../encoding/HFileBlockDefaultEncodingContext.java | 2 +- .../io/hadoopbackport/ThrottledInputStream.java | 2 +- .../apache/hadoop/hbase/io/util/LRUDictionary.java | 2 +- .../apache/hadoop/hbase/io/util/StreamUtils.java | 2 +- .../java/org/apache/hadoop/hbase/net/Address.java | 4 +- .../org/apache/hadoop/hbase/nio/MultiByteBuff.java | 2 +- .../apache/hadoop/hbase/nio/SingleByteBuff.java | 2 +- .../org/apache/hadoop/hbase/security/User.java | 2 +- .../apache/hadoop/hbase/security/UserProvider.java | 14 +- .../hbase/util/AbstractPositionedByteRange.java | 2 +- .../apache/hadoop/hbase/util/ByteBufferUtils.java | 2 +- .../apache/hadoop/hbase/util/ByteRangeUtils.java | 2 +- .../java/org/apache/hadoop/hbase/util/Bytes.java | 10 +- .../apache/hadoop/hbase/util/ClassLoaderBase.java | 2 +- .../org/apache/hadoop/hbase/util/ClassSize.java | 2 +- .../hadoop/hbase/util/CoprocessorClassLoader.java | 4 +- .../hadoop/hbase/util/DynamicClassLoader.java | 29 +- .../org/apache/hadoop/hbase/util/OrderedBytes.java | 4 +- .../java/org/apache/hadoop/hbase/util/Threads.java | 2 +- .../hbase/util/test/RedundantKVGenerator.java | 2 +- .../apache/hadoop/hbase/zookeeper/ZKConfig.java | 2 +- .../java/org/apache/hadoop/hbase/ClassFinder.java | 2 +- .../org/apache/hadoop/hbase/TestClassFinder.java | 5 +- .../hadoop/hbase/TestHBaseConfiguration.java | 2 +- .../apache/hadoop/hbase/codec/TestCellCodec.java | 4 +- .../hadoop/hbase/codec/TestCellCodecWithTags.java | 4 +- .../hadoop/hbase/codec/TestKeyValueCodec.java | 4 +- .../hbase/codec/TestKeyValueCodecWithTags.java | 4 +- hbase-endpoint/pom.xml | 333 +++-- .../hbase/client/TestRpcControllerFactory.java | 2 +- ...HRegionServerBulkLoadWithOldSecureEndpoint.java | 2 +- hbase-examples/pom.xml | 10 + .../hbase/client/example/HttpProxyExample.java | 4 +- .../client/example/MultiThreadedClientExample.java | 2 +- .../hbase/client/example/TestHttpProxyExample.java | 2 +- hbase-external-blockcache/pom.xml | 12 + hbase-hadoop-compat/pom.xml | 4 + .../hadoop/hbase/metrics/OperationMetrics.java | 2 +- hbase-hadoop2-compat/pom.xml | 14 +- .../org/apache/hadoop/hbase/metrics/Interns.java | 6 +- .../hadoop/hbase/metrics/MetricsInfoImpl.java | 7 +- .../impl/GlobalMetricRegistriesAdapter.java | 2 +- .../hadoop/metrics2/impl/JmxCacheBuster.java | 2 +- .../metrics2/lib/DynamicMetricsRegistry.java | 6 +- .../metrics2/util/MetricSampleQuantiles.java | 2 +- hbase-it/pom.xml | 22 +- .../hbase/IntegrationTestAcidGuarantees.java | 2 +- .../hadoop/hbase/IntegrationTestBackupRestore.java | 8 +- .../apache/hadoop/hbase/IntegrationTestIngest.java | 2 +- .../hbase/IntegrationTestRegionReplicaPerf.java | 10 +- .../IntegrationTestRegionReplicaReplication.java | 2 +- .../hbase/chaos/factories/MonkeyFactory.java | 2 +- .../hadoop/hbase/chaos/util/ChaosMonkeyRunner.java | 2 +- .../hadoop/hbase/ipc/IntegrationTestRpcClient.java | 2 +- .../hbase/mapreduce/IntegrationTestBulkLoad.java | 4 +- .../hadoop/hbase/mttr/IntegrationTestMTTR.java | 7 +- .../hbase/test/IntegrationTestBigLinkedList.java | 2 +- .../hbase/test/IntegrationTestLoadAndVerify.java | 2 +- .../hbase/test/IntegrationTestReplication.java | 2 +- ...eBoundedMultiGetRequestsWithRegionReplicas.java | 2 +- ...nTestTimeBoundedRequestsWithRegionReplicas.java | 2 +- hbase-metrics-api/pom.xml | 8 + .../hbase/metrics/MetricRegistriesLoader.java | 2 +- .../hbase/metrics/TestMetricRegistriesLoader.java | 4 +- hbase-metrics/pom.xml | 4 + .../hbase/metrics/impl/TestRefCountingMap.java | 2 +- hbase-prefix-tree/pom.xml | 20 +- .../prefixtree/decode/PrefixTreeArraySearcher.java | 2 +- .../encode/column/ColumnSectionWriter.java | 2 +- .../codec/prefixtree/encode/other/LongEncoder.java | 2 +- .../prefixtree/encode/row/RowSectionWriter.java | 2 +- .../prefixtree/encode/tokenize/Tokenizer.java | 2 +- .../prefixtree/encode/tokenize/TokenizerNode.java | 2 +- .../hadoop/hbase/util/byterange/ByteRangeSet.java | 2 +- .../prefixtree/builder/TestTokenizerData.java | 2 +- .../codec/prefixtree/builder/TestTreeDepth.java | 2 +- .../builder/data/TestTokenizerDataBasic.java | 2 +- .../builder/data/TestTokenizerDataEdgeCase.java | 2 +- .../codec/prefixtree/column/TestColumnBuilder.java | 2 +- .../codec/prefixtree/column/TestColumnData.java | 2 +- .../column/data/TestColumnDataRandom.java | 2 +- .../column/data/TestColumnDataSimple.java | 2 +- .../codec/prefixtree/row/BaseTestRowData.java | 2 +- .../hbase/codec/prefixtree/row/TestRowData.java | 2 +- .../hbase/codec/prefixtree/row/TestRowEncoder.java | 2 +- .../row/data/TestRowDataComplexQualifiers.java | 2 +- .../prefixtree/row/data/TestRowDataDeeper.java | 2 +- .../row/data/TestRowDataDifferentTimestamps.java | 2 +- .../prefixtree/row/data/TestRowDataEmpty.java | 2 +- .../row/data/TestRowDataExerciseFInts.java | 2 +- .../row/data/TestRowDataMultiFamilies.java | 2 +- .../codec/prefixtree/row/data/TestRowDataNub.java | 2 +- .../row/data/TestRowDataNumberStrings.java | 2 +- .../row/data/TestRowDataQualifierByteOrdering.java | 2 +- .../row/data/TestRowDataRandomKeyValues.java | 2 +- .../data/TestRowDataRandomKeyValuesWithTags.java | 2 +- .../row/data/TestRowDataSearchWithPrefix.java | 2 +- .../row/data/TestRowDataSearcherRowMiss.java | 2 +- .../prefixtree/row/data/TestRowDataSimple.java | 2 +- .../row/data/TestRowDataSingleQualifier.java | 2 +- .../prefixtree/row/data/TestRowDataTrivial.java | 2 +- .../row/data/TestRowDataTrivialWithTags.java | 2 +- .../codec/prefixtree/row/data/TestRowDataUrls.java | 2 +- .../row/data/TestRowDataUrlsExample.java | 2 +- .../prefixtree/timestamp/TestTimestampData.java | 2 +- hbase-procedure/pom.xml | 4 +- .../apache/hadoop/hbase/procedure2/Procedure.java | 2 +- .../hadoop/hbase/procedure2/ProcedureExecutor.java | 4 +- .../hbase/procedure2/ProcedureScheduler.java | 2 +- .../hadoop/hbase/procedure2/ProcedureUtil.java | 2 +- .../procedure2/RemoteProcedureDispatcher.java | 2 +- .../hbase/procedure2/SimpleProcedureScheduler.java | 2 +- .../procedure2/store/wal/WALProcedureStore.java | 2 +- .../hadoop/hbase/procedure2/TestProcedureUtil.java | 8 + hbase-protocol-shaded/pom.xml | 2 +- .../com/google/protobuf/AbstractMessage.java | 7 +- .../com/google/protobuf/AbstractMessageLite.java | 21 +- .../shaded/com/google/protobuf/AbstractParser.java | 26 +- .../hbase/shaded/com/google/protobuf/Any.java | 12 + .../hbase/shaded/com/google/protobuf/Api.java | 11 + .../shaded/com/google/protobuf/BoolValue.java | 11 + .../com/google/protobuf/BooleanArrayList.java | 12 +- .../shaded/com/google/protobuf/BytesValue.java | 11 + .../com/google/protobuf/CodedInputStream.java | 6 +- .../com/google/protobuf/CodedOutputStream.java | 4 +- .../com/google/protobuf/DescriptorProtos.java | 649 ++++++++- .../shaded/com/google/protobuf/Descriptors.java | 7 +- .../com/google/protobuf/DoubleArrayList.java | 12 +- .../shaded/com/google/protobuf/DoubleValue.java | 11 + .../hbase/shaded/com/google/protobuf/Duration.java | 41 +- .../com/google/protobuf/DurationOrBuilder.java | 3 +- .../shaded/com/google/protobuf/DynamicMessage.java | 7 +- .../hbase/shaded/com/google/protobuf/Empty.java | 11 + .../hbase/shaded/com/google/protobuf/Enum.java | 11 + .../shaded/com/google/protobuf/EnumValue.java | 11 + .../hbase/shaded/com/google/protobuf/Field.java | 11 + .../shaded/com/google/protobuf/FieldMask.java | 11 + .../hbase/shaded/com/google/protobuf/FieldSet.java | 7 +- .../shaded/com/google/protobuf/FloatArrayList.java | 12 +- .../shaded/com/google/protobuf/FloatValue.java | 11 + .../com/google/protobuf/GeneratedMessage.java | 4 + .../com/google/protobuf/GeneratedMessageLite.java | 24 +- .../com/google/protobuf/GeneratedMessageV3.java | 6 +- .../shaded/com/google/protobuf/Int32Value.java | 11 + .../shaded/com/google/protobuf/Int64Value.java | 11 + .../shaded/com/google/protobuf/IntArrayList.java | 12 +- .../hbase/shaded/com/google/protobuf/Internal.java | 15 + .../shaded/com/google/protobuf/LazyFieldLite.java | 1 + .../shaded/com/google/protobuf/ListValue.java | 11 + .../shaded/com/google/protobuf/LongArrayList.java | 12 +- .../hbase/shaded/com/google/protobuf/MapEntry.java | 26 +- .../shaded/com/google/protobuf/MapEntryLite.java | 5 + .../hbase/shaded/com/google/protobuf/MapField.java | 9 + .../shaded/com/google/protobuf/MapFieldLite.java | 14 +- .../hbase/shaded/com/google/protobuf/Method.java | 11 + .../hbase/shaded/com/google/protobuf/Mixin.java | 11 + .../hbase/shaded/com/google/protobuf/Option.java | 11 + .../hbase/shaded/com/google/protobuf/Parser.java | 13 + .../protobuf/PrimitiveNonBoxingCollection.java | 34 + .../google/protobuf/RepeatedFieldBuilderV3.java | 18 +- .../com/google/protobuf/SingleFieldBuilderV3.java | 12 +- .../shaded/com/google/protobuf/SourceContext.java | 11 + .../shaded/com/google/protobuf/StringValue.java | 11 + .../hbase/shaded/com/google/protobuf/Struct.java | 11 + .../shaded/com/google/protobuf/TextFormat.java | 4 +- .../shaded/com/google/protobuf/Timestamp.java | 51 + .../hbase/shaded/com/google/protobuf/Type.java | 11 + .../shaded/com/google/protobuf/UInt32Value.java | 11 + .../shaded/com/google/protobuf/UInt64Value.java | 11 + .../com/google/protobuf/UnknownFieldSet.java | 4 +- .../shaded/com/google/protobuf/UnsafeUtil.java | 401 ++++-- .../hbase/shaded/com/google/protobuf/Utf8.java | 2 +- .../hbase/shaded/com/google/protobuf/Value.java | 11 + .../com/google/protobuf/compiler/PluginProtos.java | 105 +- .../protobuf/generated/TestProcedureProtos.java | 11 + .../shaded/ipc/protobuf/generated/TestProtos.java | 66 + .../protobuf/generated/AccessControlProtos.java | 165 +++ .../shaded/protobuf/generated/AdminProtos.java | 462 +++++++ .../shaded/protobuf/generated/BackupProtos.java | 55 + .../shaded/protobuf/generated/CellProtos.java | 22 + .../shaded/protobuf/generated/ClientProtos.java | 411 +++++- .../shaded/protobuf/generated/ClusterIdProtos.java | 11 + .../protobuf/generated/ClusterStatusProtos.java | 110 ++ .../protobuf/generated/ComparatorProtos.java | 99 ++ .../protobuf/generated/EncryptionProtos.java | 11 + .../protobuf/generated/ErrorHandlingProtos.java | 33 + .../hbase/shaded/protobuf/generated/FSProtos.java | 22 + .../shaded/protobuf/generated/FilterProtos.java | 330 +++++ .../shaded/protobuf/generated/HBaseProtos.java | 264 ++++ .../shaded/protobuf/generated/HFileProtos.java | 22 + .../protobuf/generated/LoadBalancerProtos.java | 11 + .../protobuf/generated/LockServiceProtos.java | 77 ++ .../shaded/protobuf/generated/MapReduceProtos.java | 22 + .../protobuf/generated/MasterProcedureProtos.java | 264 ++++ .../shaded/protobuf/generated/MasterProtos.java | 1419 ++++++++++++++++++++ .../shaded/protobuf/generated/ProcedureProtos.java | 88 ++ .../shaded/protobuf/generated/QuotaProtos.java | 209 +++ .../hbase/shaded/protobuf/generated/RPCProtos.java | 88 ++ .../protobuf/generated/RegionNormalizerProtos.java | 11 + .../generated/RegionServerStatusProtos.java | 154 +++ .../protobuf/generated/ReplicationProtos.java | 209 +++ .../shaded/protobuf/generated/SnapshotProtos.java | 66 + .../shaded/protobuf/generated/TracingProtos.java | 11 + .../hbase/shaded/protobuf/generated/WALProtos.java | 110 ++ .../shaded/protobuf/generated/ZooKeeperProtos.java | 66 + hbase-rest/pom.xml | 16 +- .../org/apache/hadoop/hbase/rest/RESTServer.java | 2 +- hbase-rsgroup/pom.xml | 4 +- .../hadoop/hbase/rsgroup/RSGroupAdminClient.java | 2 +- .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java | 2 +- .../hadoop/hbase/rsgroup/RSGroupAdminServer.java | 4 +- .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 12 +- .../hbase/rsgroup/RSGroupInfoManagerImpl.java | 6 +- .../balancer/TestRSGroupBasedLoadBalancer.java | 4 +- .../apache/hadoop/hbase/rsgroup/TestRSGroups.java | 2 +- .../hadoop/hbase/rsgroup/TestRSGroupsBase.java | 4 +- .../hbase/rsgroup/TestRSGroupsOfflineMode.java | 2 +- .../hbase/rsgroup/VerifyingRSGroupAdminClient.java | 4 +- hbase-server/pom.xml | 57 +- .../apache/hadoop/hbase/ZKNamespaceManager.java | 2 +- .../hadoop/hbase/backup/BackupHFileCleaner.java | 6 +- .../apache/hadoop/hbase/backup/HFileArchiver.java | 8 +- .../hadoop/hbase/backup/impl/BackupAdminImpl.java | 2 +- .../hadoop/hbase/backup/impl/BackupCommands.java | 2 +- .../hadoop/hbase/backup/impl/BackupManager.java | 2 +- .../hbase/backup/impl/TableBackupClient.java | 2 +- .../hbase/backup/mapreduce/HFileSplitterJob.java | 2 +- .../hadoop/hbase/client/locking/EntityLock.java | 2 +- .../hbase/client/locking/LockServiceClient.java | 2 +- .../coordination/SplitLogManagerCoordination.java | 2 +- .../coordination/SplitLogWorkerCoordination.java | 2 +- .../hbase/coprocessor/MetricsCoprocessor.java | 2 +- .../hadoop/hbase/coprocessor/RegionObserver.java | 2 +- .../hadoop/hbase/executor/ExecutorService.java | 8 +- .../hbase/favored/FavoredNodeAssignmentHelper.java | 6 +- .../hbase/favored/FavoredNodeLoadBalancer.java | 6 +- .../hadoop/hbase/favored/FavoredNodesManager.java | 6 +- .../hbase/favored/StartcodeAgnosticServerName.java | 4 +- .../org/apache/hadoop/hbase/http/HttpServer.java | 4 +- .../hadoop/hbase/io/FSDataInputStreamWrapper.java | 2 +- .../java/org/apache/hadoop/hbase/io/MetricsIO.java | 2 +- .../hbase/io/asyncfs/AsyncFSOutputHelper.java | 4 +- .../io/asyncfs/FanOutOneBlockAsyncDFSOutput.java | 2 +- .../FanOutOneBlockAsyncDFSOutputHelper.java | 4 +- .../FanOutOneBlockAsyncDFSOutputSaslHelper.java | 14 +- .../apache/hadoop/hbase/io/hfile/CacheConfig.java | 2 +- .../hadoop/hbase/io/hfile/CombinedBlockCache.java | 2 +- .../org/apache/hadoop/hbase/io/hfile/HFile.java | 4 +- .../apache/hadoop/hbase/io/hfile/HFileBlock.java | 4 +- .../hadoop/hbase/io/hfile/HFileReaderImpl.java | 2 +- .../hadoop/hbase/io/hfile/LruBlockCache.java | 11 +- .../hadoop/hbase/io/hfile/LruCachedBlockQueue.java | 2 +- .../hbase/io/hfile/bucket/BucketAllocator.java | 10 +- .../hadoop/hbase/io/hfile/bucket/BucketCache.java | 4 +- .../hbase/io/hfile/bucket/CachedEntryQueue.java | 2 +- .../org/apache/hadoop/hbase/ipc/RpcExecutor.java | 4 +- .../org/apache/hadoop/hbase/ipc/RpcServer.java | 2 +- .../hadoop/hbase/ipc/RpcServerInterface.java | 2 +- .../apache/hadoop/hbase/ipc/SimpleRpcServer.java | 2 +- .../org/apache/hadoop/hbase/mapred/Driver.java | 2 +- .../apache/hadoop/hbase/mapreduce/CellCounter.java | 2 +- .../hadoop/hbase/mapreduce/HFileOutputFormat2.java | 2 +- .../apache/hadoop/hbase/mapreduce/HashTable.java | 6 +- .../org/apache/hadoop/hbase/mapreduce/Import.java | 4 +- .../apache/hadoop/hbase/mapreduce/ImportTsv.java | 8 +- .../apache/hadoop/hbase/mapreduce/JarFinder.java | 2 +- .../hbase/mapreduce/LoadIncrementalHFiles.java | 8 +- .../mapreduce/MultiTableHFileOutputFormat.java | 2 +- .../mapreduce/MultiTableSnapshotInputFormat.java | 2 +- .../MultiTableSnapshotInputFormatImpl.java | 4 +- .../apache/hadoop/hbase/mapreduce/SyncTable.java | 7 +- .../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 2 +- .../hbase/mapreduce/TableRecordReaderImpl.java | 2 +- .../hbase/mapreduce/TableSnapshotInputFormat.java | 2 +- .../mapreduce/TableSnapshotInputFormatImpl.java | 2 +- .../apache/hadoop/hbase/mapreduce/WALPlayer.java | 2 +- .../mapreduce/replication/VerifyReplication.java | 2 +- .../apache/hadoop/hbase/master/CatalogJanitor.java | 2 +- .../org/apache/hadoop/hbase/master/HMaster.java | 6 +- .../apache/hadoop/hbase/master/MasterServices.java | 2 +- .../hadoop/hbase/master/MasterWalManager.java | 2 +- .../apache/hadoop/hbase/master/ServerManager.java | 2 +- .../hadoop/hbase/master/SplitLogManager.java | 2 +- .../hadoop/hbase/master/TableNamespaceManager.java | 2 +- .../hadoop/hbase/master/TableStateManager.java | 2 +- .../hbase/master/assignment/AssignmentManager.java | 2 +- .../hbase/master/assignment/GCRegionProcedure.java | 2 +- .../assignment/MergeTableRegionsProcedure.java | 2 +- .../hbase/master/assignment/RegionStateStore.java | 2 +- .../hbase/master/assignment/RegionStates.java | 2 +- .../assignment/SplitTableRegionProcedure.java | 2 +- .../hbase/master/balancer/BaseLoadBalancer.java | 10 +- .../master/balancer/FavoredStochasticBalancer.java | 6 +- .../master/balancer/RegionLocationFinder.java | 18 +- .../hbase/master/balancer/SimpleLoadBalancer.java | 2 +- .../master/balancer/StochasticLoadBalancer.java | 4 +- .../master/cleaner/BaseFileCleanerDelegate.java | 4 +- .../hadoop/hbase/master/cleaner/CleanerChore.java | 8 +- .../hadoop/hbase/master/cleaner/HFileCleaner.java | 2 +- .../hadoop/hbase/master/locking/LockManager.java | 2 +- .../master/procedure/CloneSnapshotProcedure.java | 2 +- .../master/procedure/CreateTableProcedure.java | 2 +- .../master/procedure/MasterProcedureScheduler.java | 2 +- .../master/procedure/RSProcedureDispatcher.java | 2 +- .../snapshot/EnabledTableSnapshotHandler.java | 2 +- .../hbase/master/snapshot/SnapshotFileCache.java | 4 +- .../org/apache/hadoop/hbase/mob/MobFileCache.java | 2 +- .../mob/compactions/PartitionedMobCompactor.java | 2 +- .../hadoop/hbase/monitoring/LogMonitoring.java | 2 +- .../monitoring/MemoryBoundedLogMessageBuffer.java | 6 +- .../hadoop/hbase/monitoring/TaskMonitor.java | 4 +- .../namespace/NamespaceTableAndRegionInfo.java | 2 +- .../apache/hadoop/hbase/procedure/Procedure.java | 2 +- .../hbase/procedure/ProcedureCoordinator.java | 2 +- .../hadoop/hbase/procedure/ProcedureMember.java | 2 +- .../flush/MasterFlushTableProcedureManager.java | 2 +- .../hbase/quotas/AverageIntervalRateLimiter.java | 2 +- .../hbase/quotas/FixedIntervalRateLimiter.java | 2 +- .../hadoop/hbase/quotas/MasterQuotaManager.java | 2 +- .../hbase/quotas/NamespaceQuotaSnapshotStore.java | 4 +- .../org/apache/hadoop/hbase/quotas/QuotaCache.java | 2 +- .../hadoop/hbase/quotas/QuotaObserverChore.java | 8 +- .../apache/hadoop/hbase/quotas/RateLimiter.java | 2 +- .../hbase/quotas/RegionServerRpcQuotaManager.java | 2 +- .../quotas/RegionServerSpaceQuotaManager.java | 2 +- .../hbase/quotas/SnapshotQuotaObserverChore.java | 4 +- .../hbase/quotas/TableQuotaSnapshotStore.java | 4 +- .../hbase/regionserver/AbstractMemStore.java | 2 +- .../AnnotationReadingPriorityFunction.java | 2 +- .../hadoop/hbase/regionserver/CellChunkMap.java | 2 +- .../apache/hadoop/hbase/regionserver/CellSet.java | 2 +- .../apache/hadoop/hbase/regionserver/Chunk.java | 4 +- .../hadoop/hbase/regionserver/ChunkCreator.java | 4 +- .../hadoop/hbase/regionserver/CompactSplit.java | 4 +- .../regionserver/CompactedHFilesDischarger.java | 2 +- .../hbase/regionserver/CompactingMemStore.java | 2 +- .../regionserver/CompositeImmutableSegment.java | 2 +- .../ConstantSizeRegionSplitPolicy.java | 2 +- .../regionserver/DefaultStoreFileManager.java | 6 +- .../DelimitedKeyPrefixRegionSplitPolicy.java | 5 +- .../apache/hadoop/hbase/regionserver/HRegion.java | 14 +- .../hbase/regionserver/HRegionFileSystem.java | 2 +- .../hadoop/hbase/regionserver/HRegionServer.java | 6 +- .../apache/hadoop/hbase/regionserver/HStore.java | 12 +- .../hadoop/hbase/regionserver/HStoreFile.java | 2 +- .../hbase/regionserver/HeapMemoryManager.java | 2 +- .../hadoop/hbase/regionserver/KeyValueHeap.java | 2 +- .../hadoop/hbase/regionserver/LogRoller.java | 2 +- .../hbase/regionserver/MemStoreCompactor.java | 2 +- .../hadoop/hbase/regionserver/MemStoreFlusher.java | 2 +- .../hadoop/hbase/regionserver/MemStoreLABImpl.java | 4 +- .../hbase/regionserver/MetricsRegionServer.java | 2 +- .../MetricsTableWrapperAggregateImpl.java | 2 +- .../MultiVersionConcurrencyControl.java | 6 +- .../hadoop/hbase/regionserver/MutableSegment.java | 2 +- .../hadoop/hbase/regionserver/RSRpcServices.java | 11 +- .../apache/hadoop/hbase/regionserver/Region.java | 2 +- .../hbase/regionserver/RegionCoprocessorHost.java | 4 +- .../hbase/regionserver/RegionMergeRequest.java | 2 +- .../hbase/regionserver/RegionSplitPolicy.java | 2 +- .../hadoop/hbase/regionserver/RowProcessor.java | 2 +- .../apache/hadoop/hbase/regionserver/ScanInfo.java | 2 +- .../hbase/regionserver/ScannerIdGenerator.java | 16 +- .../apache/hadoop/hbase/regionserver/Segment.java | 2 +- .../hadoop/hbase/regionserver/SegmentFactory.java | 2 +- .../hbase/regionserver/ServerNonceManager.java | 2 +- .../hadoop/hbase/regionserver/SplitLogWorker.java | 2 +- .../hadoop/hbase/regionserver/SplitRequest.java | 2 +- .../hbase/regionserver/StoreFileComparators.java | 6 +- .../hbase/regionserver/StoreFileManager.java | 2 +- .../hadoop/hbase/regionserver/StoreFileReader.java | 2 +- .../hadoop/hbase/regionserver/StoreFileWriter.java | 2 +- .../hadoop/hbase/regionserver/StoreScanner.java | 2 +- .../hbase/regionserver/StripeStoreEngine.java | 2 +- .../hbase/regionserver/StripeStoreFileManager.java | 4 +- .../hbase/regionserver/StripeStoreFlusher.java | 2 +- .../compactions/CompactionRequest.java | 2 +- .../hbase/regionserver/compactions/Compactor.java | 2 +- .../compactions/DateTieredCompactionPolicy.java | 10 +- .../regionserver/compactions/DefaultCompactor.java | 2 +- .../ExponentialCompactionWindowFactory.java | 2 +- .../compactions/SortedCompactionPolicy.java | 8 +- .../compactions/StripeCompactionPolicy.java | 2 +- .../querymatcher/LegacyScanQueryMatcher.java | 2 +- .../hbase/regionserver/wal/AbstractFSWAL.java | 4 +- .../hadoop/hbase/regionserver/wal/AsyncFSWAL.java | 2 +- .../regionserver/wal/AsyncProtobufLogWriter.java | 2 +- .../hadoop/hbase/regionserver/wal/Compressor.java | 2 +- .../hadoop/hbase/regionserver/wal/FSHLog.java | 2 +- .../hadoop/hbase/regionserver/wal/FSWALEntry.java | 2 +- .../hadoop/hbase/regionserver/wal/MetricsWAL.java | 2 +- .../regionserver/wal/SequenceIdAccounting.java | 2 +- .../hadoop/hbase/regionserver/wal/WALEdit.java | 2 +- .../hbase/replication/BaseReplicationEndpoint.java | 5 +- .../hbase/replication/BulkLoadCellFilter.java | 2 +- .../replication/HBaseReplicationEndpoint.java | 2 +- .../NamespaceTableCfWALEntryFilter.java | 2 +- .../hbase/replication/ReplicationEndpoint.java | 3 +- .../hbase/replication/ScopeWALEntryFilter.java | 2 +- .../master/ReplicationHFileCleaner.java | 10 +- .../replication/master/ReplicationLogCleaner.java | 10 +- .../regionserver/DumpReplicationQueues.java | 2 +- .../HBaseInterClusterReplicationEndpoint.java | 11 +- .../replication/regionserver/HFileReplicator.java | 2 +- .../RegionReplicaReplicationEndpoint.java | 6 +- .../replication/regionserver/Replication.java | 2 +- .../regionserver/ReplicationSinkManager.java | 6 +- .../regionserver/ReplicationSource.java | 36 +- .../regionserver/ReplicationSourceManager.java | 4 +- .../regionserver/ReplicationSourceShipper.java | 6 +- .../hbase/security/access/AccessControlLists.java | 6 +- .../hbase/security/access/AccessController.java | 14 +- .../hadoop/hbase/security/access/AuthResult.java | 2 +- .../hbase/security/access/TableAuthManager.java | 8 +- .../hadoop/hbase/security/token/TokenUtil.java | 5 +- .../hbase/security/token/ZKSecretWatcher.java | 2 +- .../security/visibility/ExpressionParser.java | 2 +- .../security/visibility/VisibilityController.java | 11 +- .../visibility/VisibilityReplicationEndpoint.java | 47 +- .../hbase/security/visibility/VisibilityUtils.java | 6 +- .../hbase/snapshot/RestoreSnapshotHelper.java | 2 +- .../hbase/snapshot/SnapshotDescriptionUtils.java | 2 +- .../java/org/apache/hadoop/hbase/tool/Canary.java | 2 +- .../hadoop/hbase/util/ConfigurationUtil.java | 2 +- .../hadoop/hbase/util/DirectMemoryUtils.java | 2 +- .../org/apache/hadoop/hbase/util/FSHDFSUtils.java | 2 +- .../hadoop/hbase/util/FSTableDescriptors.java | 4 +- .../java/org/apache/hadoop/hbase/util/FSUtils.java | 10 +- .../org/apache/hadoop/hbase/util/HBaseFsck.java | 14 +- .../java/org/apache/hadoop/hbase/util/IdLock.java | 2 +- .../apache/hadoop/hbase/util/IdReadWriteLock.java | 2 +- .../apache/hadoop/hbase/util/JvmPauseMonitor.java | 17 +- .../hadoop/hbase/util/RegionSizeCalculator.java | 2 +- .../hadoop/hbase/util/RegionSplitCalculator.java | 6 +- .../apache/hadoop/hbase/util/RegionSplitter.java | 8 +- .../hadoop/hbase/wal/AbstractFSWALProvider.java | 2 +- .../hadoop/hbase/wal/AsyncFSWALProvider.java | 2 +- .../main/java/org/apache/hadoop/hbase/wal/WAL.java | 2 +- .../org/apache/hadoop/hbase/wal/WALFactory.java | 2 +- .../java/org/apache/hadoop/hbase/wal/WALKey.java | 2 +- .../org/apache/hadoop/hbase/wal/WALSplitter.java | 6 +- .../hbase/zookeeper/MiniZooKeeperCluster.java | 2 +- .../org/apache/hadoop/hbase/GenericTestUtils.java | 6 +- .../apache/hadoop/hbase/HBaseTestingUtility.java | 3 - .../apache/hadoop/hbase/PerformanceEvaluation.java | 6 +- .../hadoop/hbase/ScanPerformanceEvaluation.java | 85 +- .../apache/hadoop/hbase/TestAcidGuarantees.java | 2 +- .../org/apache/hadoop/hbase/TestIOFencing.java | 2 +- .../apache/hadoop/hbase/TestMetaTableAccessor.java | 2 +- .../org/apache/hadoop/hbase/TestNamespace.java | 2 +- .../org/apache/hadoop/hbase/TestRegionLoad.java | 4 +- .../hbase/backup/TestBackupBoundaryTests.java | 2 +- .../hadoop/hbase/backup/TestBackupDelete.java | 2 +- .../hbase/backup/TestBackupDeleteRestore.java | 2 +- .../hbase/backup/TestBackupDeleteWithFailures.java | 2 +- .../hadoop/hbase/backup/TestBackupDescribe.java | 2 +- .../hbase/backup/TestBackupMultipleDeletes.java | 2 +- .../hadoop/hbase/backup/TestBackupShowHistory.java | 2 +- .../hbase/backup/TestBackupStatusProgress.java | 2 +- .../hadoop/hbase/backup/TestFullRestore.java | 2 +- .../hadoop/hbase/backup/TestIncrementalBackup.java | 2 +- .../backup/TestIncrementalBackupDeleteTable.java | 2 +- .../backup/TestIncrementalBackupWithBulkLoad.java | 2 +- .../backup/TestIncrementalBackupWithFailures.java | 2 +- .../hadoop/hbase/backup/TestRemoteBackup.java | 2 +- .../hbase/backup/TestRepairAfterFailedDelete.java | 2 +- .../hbase/backup/master/TestBackupLogCleaner.java | 4 +- .../hbase/client/SimpleRawScanResultConsumer.java | 2 +- .../hbase/client/SimpleScanResultConsumer.java | 2 +- .../hadoop/hbase/client/TestEnableTable.java | 6 +- .../org/apache/hadoop/hbase/client/TestHCM.java | 2 +- .../hadoop/hbase/client/TestSizeFailures.java | 2 +- .../hbase/client/TestSnapshotFromClient.java | 2 +- .../hadoop/hbase/client/TestTableFavoredNodes.java | 4 +- .../hadoop/hbase/codec/TestCellMessageCodec.java | 4 +- .../hbase/coprocessor/SimpleRegionObserver.java | 2 +- .../hbase/coprocessor/TestCoprocessorMetrics.java | 2 +- ...ObserverForAddingMutationsFromCoprocessors.java | 2 +- .../favored/TestFavoredNodeAssignmentHelper.java | 4 +- .../org/apache/hadoop/hbase/filter/TestFilter.java | 2 +- .../apache/hadoop/hbase/filter/TestFilterList.java | 2 +- .../filter/TestFuzzyRowAndColumnRangeFilter.java | 2 +- .../hbase/filter/TestFuzzyRowFilterEndToEnd.java | 2 +- .../hadoop/hbase/io/hfile/CacheTestUtils.java | 2 +- .../hadoop/hbase/io/hfile/TestCacheOnWrite.java | 2 +- .../io/hfile/TestLazyDataBlockDecompression.java | 2 +- .../apache/hadoop/hbase/ipc/AbstractTestIPC.java | 4 +- .../apache/hadoop/hbase/ipc/TestBufferChain.java | 4 +- .../apache/hadoop/hbase/ipc/TestProtoBufRpc.java | 2 +- .../hadoop/hbase/ipc/TestRpcClientLeaks.java | 2 +- .../hadoop/hbase/ipc/TestRpcHandlerException.java | 2 +- .../ipc/TestRpcServerSlowConnectionSetup.java | 2 +- .../hadoop/hbase/ipc/TestSimpleRpcScheduler.java | 8 +- .../hadoop/hbase/mapred/TestGroupingTableMap.java | 8 +- .../mapred/TestMultiTableSnapshotInputFormat.java | 2 +- .../apache/hadoop/hbase/mapred/TestRowCounter.java | 2 +- .../hbase/mapred/TestTableMapReduceUtil.java | 4 +- .../mapreduce/MultiTableInputFormatTestBase.java | 2 +- .../hadoop/hbase/mapreduce/TestHashTable.java | 4 +- .../hbase/mapreduce/TestImportTsvParser.java | 6 +- .../TestLoadIncrementalHFilesSplitRecovery.java | 2 +- .../TestMultiTableSnapshotInputFormat.java | 6 +- .../TestMultiTableSnapshotInputFormatImpl.java | 11 +- .../hadoop/hbase/mapreduce/TestSyncTable.java | 2 +- .../mapreduce/TestTableSnapshotInputFormat.java | 2 +- .../org/apache/hadoop/hbase/master/TestMaster.java | 2 +- .../hbase/master/TestMasterStatusServlet.java | 2 +- .../LoadBalancerPerformanceEvaluation.java | 13 +- .../LoadOnlyFavoredStochasticBalancer.java | 2 +- .../master/balancer/TestBaseLoadBalancer.java | 2 +- .../balancer/TestFavoredNodeTableImport.java | 2 +- .../TestFavoredStochasticBalancerPickers.java | 4 +- .../TestFavoredStochasticLoadBalancer.java | 6 +- .../hbase/master/cleaner/TestLogsCleaner.java | 2 +- .../cleaner/TestReplicationHFileCleaner.java | 2 +- .../master/cleaner/TestSnapshotFromMaster.java | 2 +- .../master/snapshot/TestSnapshotFileCache.java | 4 +- .../hbase/namespace/TestNamespaceAuditor.java | 2 +- .../hbase/procedure/TestProcedureCoordinator.java | 2 +- .../hadoop/hbase/procedure/TestZKProcedure.java | 2 +- .../procedure/TestZKProcedureControllers.java | 2 +- .../hbase/quotas/SpaceQuotaHelperForTests.java | 6 +- .../quotas/TestNamespaceQuotaViolationStore.java | 2 +- .../apache/hadoop/hbase/quotas/TestQuotaAdmin.java | 2 +- .../hbase/quotas/TestQuotaObserverChore.java | 2 +- .../TestQuotaObserverChoreWithMiniCluster.java | 4 +- .../quotas/TestSnapshotQuotaObserverChore.java | 6 +- .../hbase/quotas/TestSpaceQuotasWithSnapshots.java | 2 +- .../hbase/quotas/TestTableQuotaViolationStore.java | 2 +- .../hadoop/hbase/quotas/TestTablesWithQuotas.java | 2 +- .../AbstractTestDateTieredCompactionPolicy.java | 4 +- .../hadoop/hbase/regionserver/RegionAsTable.java | 2 +- .../TestCompactionArchiveIOException.java | 2 +- .../hbase/regionserver/TestCompactionPolicy.java | 2 +- .../hbase/regionserver/TestDefaultMemStore.java | 6 +- .../regionserver/TestEndToEndSplitTransaction.java | 4 +- .../hadoop/hbase/regionserver/TestHRegion.java | 4 +- .../regionserver/TestHRegionReplayEvents.java | 2 +- .../regionserver/TestHRegionServerBulkLoad.java | 2 +- .../TestHRegionServerBulkLoadWithOldClient.java | 2 +- .../hadoop/hbase/regionserver/TestHStoreFile.java | 6 +- .../hadoop/hbase/regionserver/TestMemStoreLAB.java | 8 +- .../regionserver/TestPerColumnFamilyFlush.java | 2 +- .../hbase/regionserver/TestRSStatusServlet.java | 2 +- .../TestRegionMergeTransactionOnCluster.java | 2 +- .../hbase/regionserver/TestReversibleScanners.java | 2 +- .../hadoop/hbase/regionserver/TestStore.java | 3 +- .../regionserver/TestTimestampFilterSeekHint.java | 2 +- .../compactions/MockStoreFileGenerator.java | 4 +- .../compactions/TestStripeCompactionPolicy.java | 4 +- .../regionserver/wal/TestAsyncProtobufLog.java | 2 +- .../replication/TestReplicationSmallTests.java | 2 +- .../hbase/replication/TestReplicationSource.java | 2 +- .../TestReplicationWALEntryFilters.java | 2 +- .../TestRegionReplicaReplicationEndpoint.java | 2 +- ...stRegionReplicaReplicationEndpointNoMaster.java | 6 +- .../regionserver/TestReplicationSinkManager.java | 2 +- .../regionserver/TestReplicationSourceManager.java | 2 +- .../hadoop/hbase/security/HBaseKerberosUtils.java | 2 +- .../hadoop/hbase/security/TestSecureIPC.java | 2 +- .../org/apache/hadoop/hbase/security/TestUser.java | 2 +- .../hbase/security/access/SecureTestUtil.java | 4 +- .../security/access/TestAccessController.java | 2 +- .../hadoop/hbase/security/access/TestCellACLs.java | 2 +- .../security/access/TestNamespaceCommands.java | 2 +- .../security/access/TestTablePermissions.java | 4 +- .../access/TestWithDisabledAuthorization.java | 2 +- .../security/token/TestTokenAuthentication.java | 2 +- .../apache/hadoop/hbase/tool/TestCanaryTool.java | 2 +- .../hadoop/hbase/util/MultiThreadedAction.java | 2 +- .../hadoop/hbase/util/MultiThreadedUpdater.java | 2 +- .../hadoop/hbase/util/TestConfigurationUtil.java | 4 +- .../hadoop/hbase/util/TestHBaseFsckTwoRS.java | 2 +- .../hbase/util/TestRegionSplitCalculator.java | 4 +- .../apache/hadoop/hbase/util/TestSortedList.java | 2 +- .../hadoop/hbase/util/hbck/HbckTestingUtil.java | 2 +- .../util/hbck/TestOfflineMetaRebuildOverlap.java | 2 +- .../apache/hadoop/hbase/wal/TestWALFiltering.java | 2 +- .../org/apache/hadoop/hbase/wal/TestWALSplit.java | 6 +- hbase-shell/pom.xml | 48 + .../hbase/client/rsgroup/TestShellRSGroups.java | 4 +- hbase-spark/pom.xml | 22 + .../hadoop/hbase/spark/TestJavaHBaseContext.java | 2 +- hbase-testing-util/pom.xml | 36 + hbase-thrift/pom.xml | 28 +- .../hbase/thrift/TBoundedThreadPoolServer.java | 2 +- .../hadoop/hbase/thrift/ThriftServerRunner.java | 6 +- .../apache/hadoop/hbase/thrift2/ThriftServer.java | 2 +- .../hadoop/hbase/thrift/TestThriftHttpServer.java | 2 +- .../hbase/thrift/TestThriftServerCmdLine.java | 2 +- .../thrift2/TestThriftHBaseServiceHandler.java | 2 +- pom.xml | 14 +- 682 files changed, 8024 insertions(+), 1442 deletions(-) create mode 100644 hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/PrimitiveNonBoxingCollection.java diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml index b0e6fb8400..a129ed6392 100644 --- a/hbase-assembly/pom.xml +++ b/hbase-assembly/pom.xml @@ -211,6 +211,12 @@ org.apache.hbase hbase-hadoop-compat + + + com.google.guava + guava + + org.apache.hbase diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml index 4c51625776..615a3450d6 100644 --- a/hbase-client/pom.xml +++ b/hbase-client/pom.xml @@ -150,8 +150,8 @@ commons-logging - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous com.google.protobuf @@ -207,6 +207,17 @@ org.apache.curator curator-client + + org.apache.commons + commons-crypto + ${commons-crypto.version} + + + net.java.dev.jna + jna + + + diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java index de79a67433..bd8968d893 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java @@ -67,7 +67,7 @@ import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.PairOfSameType; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java index 776498acab..d7c9ceacfb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnectionImpl.java @@ -21,7 +21,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.NO_NONCE_GENERATOR; import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey; import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import io.netty.util.HashedWheelTimer; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java index 1da660c3cb..0e7a32cb9e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.java @@ -36,7 +36,7 @@ import java.util.function.BiConsumer; import java.util.regex.Pattern; import java.util.stream.Collectors; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import io.netty.util.Timeout; import io.netty.util.TimerTask; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java index ba6b052a73..22efdaa360 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java index e6e4fd1ea5..710ec91336 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java index 270f265be1..1a0c304af4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRpcRetryingCallerFactory.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.client; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull; import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts; import io.netty.util.HashedWheelTimer; @@ -438,4 +438,4 @@ class AsyncRpcRetryingCallerFactory { public AdminRequestCallerBuilder adminRequest(){ return new AdminRequestCallerBuilder<>(); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java index 6343c8b7e4..e5448d9c10 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncScanSingleRegionRpcRetryingCaller.java @@ -28,7 +28,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.translateException; import static org.apache.hadoop.hbase.client.ConnectionUtils.updateResultsMetrics; import static org.apache.hadoop.hbase.client.ConnectionUtils.updateServerSideMetrics; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import io.netty.util.HashedWheelTimer; import io.netty.util.Timeout; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java index 3c551dfc54..006e3e29da 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java @@ -21,7 +21,7 @@ import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hbase.client.ConnectionUtils.allOf; import static org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.util.List; import java.util.concurrent.CompletableFuture; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java index 2f8a9390f6..28a55689b1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableResultScanner.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.client; import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java index 2a55de984b..0ddc15999b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorImpl.java @@ -15,7 +15,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java index 007e638236..34c5620e5a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize; import java.io.IOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java index 59cf005cd7..d3b19e4844 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java @@ -21,7 +21,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.calcEstimatedSize; import static org.apache.hadoop.hbase.client.ConnectionUtils.createScanResultCache; import static org.apache.hadoop.hbase.client.ConnectionUtils.incRegionCountMetrics; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java index bb302dbc85..688509f998 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.util.Collections; import java.util.HashMap; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java index 4547e78636..6727929fa6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionConfiguration.java @@ -15,7 +15,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Configuration parameters for the connection. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java index 44974cba7c..4dff7ddb22 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java @@ -122,7 +122,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.ipc.RemoteException; import org.apache.zookeeper.KeeperException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.Nullable; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java index 98ac8458db..f25f1ddbf3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java @@ -21,8 +21,8 @@ import static java.util.stream.Collectors.toList; import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW; import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 1c6ea03b0c..739e873142 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -202,7 +202,7 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; import org.apache.zookeeper.KeeperException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import com.google.protobuf.Descriptors; import com.google.protobuf.Message; import com.google.protobuf.RpcController; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HRegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HRegionLocator.java index f2c57461e6..07850092f8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HRegionLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HRegionLocator.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index d207f6a098..bacaf31237 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client; import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; // DO NOT MAKE USE OF THESE IMPORTS! THEY ARE HERE FOR COPROCESSOR ENDPOINTS ONLY. // Internally, we use shaded protobuf. This below are part of our public API. import com.google.protobuf.Descriptors; @@ -1323,4 +1323,4 @@ public class HTable implements Table { } return mutator; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java index 313125c0f3..0210c9b9ab 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java @@ -19,8 +19,8 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.IOException; import java.util.AbstractMap.SimpleEntry; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java index ea64900df9..410e7d8785 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java @@ -26,7 +26,7 @@ import com.codahale.metrics.JmxReporter; import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.RatioGauge; import com.codahale.metrics.Timer; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java index 38a1950e5e..64dada0bd6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActi import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Callable that handles the multi method call going against a single diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index b010c2f8e7..f6cb4b1b2a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -50,11 +50,11 @@ import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; -import com.google.common.io.ByteArrayDataInput; -import com.google.common.io.ByteArrayDataOutput; -import com.google.common.io.ByteStreams; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput; +import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput; +import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams; @InterfaceAudience.Public public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java index abac040ba5..52ec9cdb98 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PreemptiveFastFailInterceptor.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.Map.Entry; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java index 7f50d13302..0bf54ae092 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.util.Map; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.Filter; @@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.security.visibility.Authorizations; import org.apache.hadoop.hbase.security.visibility.VisibilityConstants; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java index 537a92d0f3..5110b8a64e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java @@ -524,4 +524,4 @@ class RawAsyncTableImpl implements RawAsyncTable { (loc, error) -> onLocateComplete(stubMaker, callable, callback, locs, nonNullEndKey, endKeyInclusive, new AtomicBoolean(false), new AtomicInteger(0), loc, error)); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java index 2dc73e0dcc..3b10549923 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java @@ -100,4 +100,4 @@ class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel { public byte[] getLastRegion() { return lastRegion; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java index 6de5779ad9..77458b0115 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; import com.google.protobuf.Descriptors.MethodDescriptor; import com.google.protobuf.Message; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java index 758bce6890..ccf779bcb7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java index 852e2c64e1..34d5d8942c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScannerCallableWithReplicas.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServerStatisticTracker.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServerStatisticTracker.java index f78ca41f39..e437b484ef 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServerStatisticTracker.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServerStatisticTracker.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.concurrent.ConcurrentHashMap; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java index de2cbe1865..1e1cb9a374 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SimpleRequestController.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.InterruptedIOException; import java.util.ArrayList; import java.util.Collection; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java index a39bd96afb..e0652e62ed 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Simple exponential backoff policy on for the client that uses a percent^4 times the diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java index 03764ed12f..2f73a0eb4d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java @@ -23,7 +23,7 @@ import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; /** * Provides server side metrics related to scan operations. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java index 94425f9283..752d18c2ff 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java @@ -58,8 +58,8 @@ import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** *

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java index f561f4a6fc..19c504247e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationSerDeHelper.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Strings; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.Collection; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index dad413297d..913dcc5937 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index 2ce9746541..bbffbe2a09 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index b6e9607c53..987e090424 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index 69b5088260..ab4cad7140 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * This filter is used for selecting only those keys with columns that are diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index 451d7ee0ee..56c633b043 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * This is a generic filter to be used to filter by comparison. It takes an * operator (equal, greater, not equal, etc) and a byte [] comparator. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index c14314cf30..5717fc4f63 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index 8493610da9..e4f03e0eba 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 895ffc8c99..20c6656651 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.UnsafeAccess; import org.apache.hadoop.hbase.util.UnsafeAvailChecker; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This is optimized version of a standard FuzzyRowFilter Filters data based on fuzzy row key. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index ed95a7d15f..070b7c3666 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * A Filter that stops after the given row. There is no "RowStopFilter" because diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index b0829413a5..7f75963590 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index 894e7b466b..c83242372d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** * Implementation of Filter interface that limits results to a specific page diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index 33b3ead063..6dc4bc201f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index 0dbc0bb712..c056833a59 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index b1409e302a..699a0640fb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java index d414f7036b..afaaccef53 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java @@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.ipc; import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE; import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java index d27602e7de..ab1a801d72 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.net.SocketAddress; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java index dc5f122447..c4e9455d2e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcUtils.java @@ -243,4 +243,4 @@ public final class CoprocessorRpcUtils { } return new DoNotRetryIOException(controller.errorText()); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index d2e0e90dd1..6dab3b5906 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java index fe039d3fa1..a8aca525d9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import io.netty.channel.Channel; import io.netty.channel.EventLoopGroup; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientFactory.java index 8cdfb03488..e72015948f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientFactory.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.ipc; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import java.net.SocketAddress; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java index 050437353e..a3a865fe38 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationQueuesClientZKImpl.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.replication; import java.util.List; import java.util.Set; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java index c80822e499..47b780d2e8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationStateZKBase.java @@ -22,7 +22,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.List; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTableBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTableBase.java index 4606e22fc7..fa1d2d60e9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTableBase.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTableBase.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.replication; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.HColumnDescriptor; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java index cc0114ba65..cd2f4cd5a6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AbstractHBaseSaslRpcClient.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.security; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.Map; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java index 1873ea3a6c..a99751ca9e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java @@ -30,9 +30,9 @@ import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessCont import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse; import org.apache.hadoop.hbase.util.ByteStringer; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import com.google.protobuf.ByteString; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; @@ -766,4 +766,4 @@ public class AccessControlUtil { .setPermission(ret) ).build(); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java index b25783d061..8476f615bf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.VersionedWritable; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; /** * Base permissions instance representing the ability to perform a given set diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java index 03798ad61c..d56c7a15c6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.security.access; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.security.access.Permission.Action; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetricsZooKeeper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetricsZooKeeper.java index 6b5e188317..031f1c36c2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetricsZooKeeper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetricsZooKeeper.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.zookeeper; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource; import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSourceImpl; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZNodePaths.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZNodePaths.java index acf1afc5f8..4c66b8f815 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZNodePaths.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZNodePaths.java @@ -24,7 +24,7 @@ import static org.apache.hadoop.hbase.HConstants.SPLIT_LOGDIR_NAME; import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT; import static org.apache.hadoop.hbase.HRegionInfo.DEFAULT_REPLICA_ID; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import java.util.Optional; import java.util.stream.IntStream; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java index c5af6ff6cf..b904d7c252 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java @@ -286,7 +286,7 @@ public class TestInterfaceAudienceAnnotations { * Checks whether all the classes in client and common modules contain * {@link InterfaceAudience} annotations. */ - @Test + @Ignore @Test public void testInterfaceAudienceAnnotation() throws ClassNotFoundException, IOException, LinkageError { @@ -327,7 +327,7 @@ public class TestInterfaceAudienceAnnotations { * Checks whether all the classes in client and common modules that are marked * InterfaceAudience.Public do not have {@link InterfaceStability} annotations. */ - @Test + @Ignore @Test public void testNoInterfaceStabilityAnnotationForPublicAPI() throws ClassNotFoundException, IOException, LinkageError { @@ -411,7 +411,7 @@ public class TestInterfaceAudienceAnnotations { 0, classes.size()); } - @Test + @Ignore @Test public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError { Set> classes = findPublicClasses(); List, Method>> protosReturnType = new ArrayList<>(); @@ -443,7 +443,7 @@ public class TestInterfaceAudienceAnnotations { return classes; } - @Test + @Ignore @Test public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError { Set> classes = findPublicClasses(); List, Method, Class>> protosParamType = new ArrayList<>(); @@ -463,7 +463,7 @@ public class TestInterfaceAudienceAnnotations { protosParamType.size()); } - @Test + @Ignore @Test public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError { Set> classes = findPublicClasses(); List> classList = new ArrayList<>(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java index d20c7c87b0..775365749e 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestClientNoCluster.java @@ -88,7 +88,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.common.base.Stopwatch; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; @@ -722,14 +722,13 @@ public class TestClientNoCluster extends Configured implements Tool { TableName tableName = TableName.valueOf(BIG_USER_TABLE); if (get) { try (Table table = sharedConnection.getTable(tableName)){ - Stopwatch stopWatch = new Stopwatch(); - stopWatch.start(); + Stopwatch stopWatch = Stopwatch.createStarted(); for (int i = 0; i < namespaceSpan; i++) { byte [] b = format(rd.nextLong()); Get g = new Get(b); table.get(g); if (i % printInterval == 0) { - LOG.info("Get " + printInterval + "/" + stopWatch.elapsedMillis()); + LOG.info("Get " + printInterval + "/" + stopWatch.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS)); stopWatch.reset(); stopWatch.start(); } @@ -739,15 +738,14 @@ public class TestClientNoCluster extends Configured implements Tool { } } else { try (BufferedMutator mutator = sharedConnection.getBufferedMutator(tableName)) { - Stopwatch stopWatch = new Stopwatch(); - stopWatch.start(); + Stopwatch stopWatch = Stopwatch.createStarted(); for (int i = 0; i < namespaceSpan; i++) { byte [] b = format(rd.nextLong()); Put p = new Put(b); p.addColumn(HConstants.CATALOG_FAMILY, b, b); mutator.mutate(p); if (i % printInterval == 0) { - LOG.info("Put " + printInterval + "/" + stopWatch.elapsedMillis()); + LOG.info("Put " + printInterval + "/" + stopWatch.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS)); stopWatch.reset(); stopWatch.start(); } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java index 7573a78557..33f7872e8c 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java @@ -27,7 +27,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.base.Strings; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; import java.io.IOException; import java.io.InputStream; diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml index 6e14b02986..98a759f86c 100644 --- a/hbase-common/pom.xml +++ b/hbase-common/pom.xml @@ -220,10 +220,9 @@ test-jar test - - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous commons-logging diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java index 0b9caac248..87868688e4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This Cell is an implementation of {@link ByteBufferCell} where the data resides in diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java index bb08d6ce1b..8fa7e854f5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellComparator.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.primitives.Longs; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Longs; /** * Compare two HBase cells. Do not use this method comparing -ROOT- or diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java index 70858f4296..86b6840d39 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java @@ -27,7 +27,7 @@ import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadFactory; import java.util.concurrent.atomic.AtomicInteger; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 4fff83078c..9b9dc4381e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.RawComparator; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * An HBase Key/Value. This is the fundamental HBase Type. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java index 0de627a645..3bbdb10058 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueTestUtil.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.IterableUtils; import org.apache.hadoop.hbase.util.Strings; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Private public class KeyValueTestUtil { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java index 807749af73..35f5225cba 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java @@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.util.IterableUtils; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.WritableUtils; -import com.google.common.base.Function; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * static convenience methods for dealing with KeyValues and collections of KeyValues diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java index 2d1eec5472..5fbbbb8a9c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java @@ -25,7 +25,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * ScheduledChore is a task performed on a period in hbase. ScheduledChores become active once diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java index fabf0c0bb9..88abc3f584 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.net.InetAddresses; +import org.apache.hadoop.hbase.shaded.com.google.common.net.InetAddresses; /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java index 08bd76345a..c74a5e20fd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java @@ -141,7 +141,8 @@ public final class TableName implements Comparable { throw new IllegalArgumentException("Name is null or empty"); } - int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(tableName, + int namespaceDelimIndex = + org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(tableName, (byte) NAMESPACE_DELIM); if (namespaceDelimIndex < 0){ isLegalTableQualifierName(tableName); @@ -435,7 +436,8 @@ public final class TableName implements Comparable { } } - int namespaceDelimIndex = com.google.common.primitives.Bytes.lastIndexOf(fullName, + int namespaceDelimIndex = + org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.lastIndexOf(fullName, (byte) NAMESPACE_DELIM); if (namespaceDelimIndex < 0) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java index 07ba3db7cd..e2d2563563 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferPool.java @@ -22,7 +22,7 @@ import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicInteger; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java index 7a759127d6..133321a1c6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java @@ -17,8 +17,8 @@ package org.apache.hadoop.hbase.io; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull; import java.io.FilterInputStream; import java.io.IOException; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java index a8dc396d4a..ad19001ff8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.MD5Hash; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Crypto context. Encapsulates an encryption algorithm and its key material. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java index 302091f259..f6c9e7c0cc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AES.java @@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.io.crypto.Context; import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * AES-128, provided by the JCE diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java index db27df691b..5b0505e24a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESDecryptor.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.Decryptor; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java index d2ce7d34fb..b752cdc4e2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/AESEncryptor.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.Encryptor; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java index 7ea4e637c8..d3889450dd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAES.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.io.crypto.Context; import org.apache.hadoop.hbase.io.crypto.Decryptor; import org.apache.hadoop.hbase.io.crypto.Encryptor; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java index e33e3661ae..a6aead6602 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESDecryptor.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.io.crypto.aes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.commons.crypto.stream.CryptoInputStream; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java index 346cd79d89..e38d7b875f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/aes/CommonsCryptoAESEncryptor.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.io.crypto.aes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.commons.crypto.stream.CryptoOutputStream; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index f42615a0e7..5ec91f795c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.compress.Compressor; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Encapsulates a data block compressed using a particular encoding algorithm. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java index 1045f945b2..1a94cac9f2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/HFileBlockDefaultEncodingContext.java @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.Compressor; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * A default implementation of {@link HFileBlockEncodingContext}. It will diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java index 88c1b3759b..0aefb24601 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hadoopbackport/ThrottledInputStream.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.io.hadoopbackport; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java index 2456961726..b70dfbcf5c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * WALDictionary using an LRU eviction algorithm. Uses a linked list running diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java index 9325284282..25245d5067 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/StreamUtils.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /* * It seems like as soon as somebody sets himself to the task of creating VInt encoding, his mind diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java index 3ee8cfc844..5eb11732bd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.net; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.net.HostAndPort; +import org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort; /** * An immutable type to hold a hostname and port combo, like an Endpoint @@ -46,7 +46,7 @@ public class Address implements Comparable

{ } public String getHostname() { - return this.hostAndPort.getHostText(); + return this.hostAndPort.getHost(); } public int getPort() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java index 948321dace..4bac5c7349 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/MultiByteBuff.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ObjectIntPair; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Provides a unified view of all the underlying ByteBuffers and will look as if a bigger diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java index 6ed90adfaf..865f2e6c0b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/nio/SingleByteBuff.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.nio; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java index c7d65be8bb..984a71469f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java @@ -28,7 +28,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; -import com.google.common.cache.LoadingCache; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Methods; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java index 955abfc96a..dc5f74024c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java @@ -24,13 +24,13 @@ import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.BaseConfigurable; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java index 8d3d0cf9d9..bc3cf047a1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractPositionedByteRange.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Extends the basic {@link SimpleByteRange} implementation with position diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java index 6dac30004b..d7f20354c4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java @@ -16,7 +16,7 @@ */ package org.apache.hadoop.hbase.util; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataInputStream; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java index 672366d681..36d9941d54 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java @@ -25,7 +25,7 @@ import java.util.Collection; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Utility methods for working with {@link ByteRange}. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index a481d27a6b..5e6500bcee 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.util; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.common.base.Preconditions.checkPositionIndex; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkArgument; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkNotNull; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.checkPositionIndex; import java.io.DataInput; import java.io.DataOutput; @@ -48,8 +48,8 @@ import org.apache.hadoop.io.WritableUtils; import sun.misc.Unsafe; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import com.google.protobuf.ByteString; /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java index 6e5aaa6d98..97b51abde4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.util; import java.net.URL; import java.net.URLClassLoader; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java index e064cc0acf..000e99a65b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.util; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.concurrent.ConcurrentHashMap; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java index bc51440ab5..19f59dc736 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CoprocessorClassLoader.java @@ -44,8 +44,8 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.io.IOUtils; -import com.google.common.base.Preconditions; -import com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; /** * ClassLoader used to load classes for Coprocessor instances. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java index 1a730696c4..4da2d4bfcd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/DynamicClassLoader.java @@ -180,19 +180,22 @@ public class DynamicClassLoader extends ClassLoaderBase { private synchronized void loadNewJars() { // Refresh local jar file lists if (localDir != null) { - for (File file : localDir.listFiles()) { - String fileName = file.getName(); - if (jarModifiedTime.containsKey(fileName)) { - continue; - } - if (file.isFile() && fileName.endsWith(".jar")) { - jarModifiedTime.put(fileName, Long.valueOf(file.lastModified())); - try { - URL url = file.toURI().toURL(); - addURL(url); - } catch (MalformedURLException mue) { - // This should not happen, just log it - LOG.warn("Failed to load new jar " + fileName, mue); + File [] files = localDir.listFiles(); + if (files != null) { + for (File file : files) { + String fileName = file.getName(); + if (jarModifiedTime.containsKey(fileName)) { + continue; + } + if (file.isFile() && fileName.endsWith(".jar")) { + jarModifiedTime.put(fileName, Long.valueOf(file.lastModified())); + try { + URL url = file.toURI().toURL(); + addURL(url); + } catch (MalformedURLException mue) { + // This should not happen, just log it + LOG.warn("Failed to load new jar " + fileName, mue); + } } } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index 3e4bc6ca58..9de96d6a6a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -28,7 +28,7 @@ import java.nio.charset.Charset; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Utility class that handles ordered byte arrays. That is, unlike @@ -461,7 +461,7 @@ public class OrderedBytes { static int lengthVaruint64(PositionedByteRange src, boolean comp) { int a0 = (comp ? DESCENDING : ASCENDING).apply(src.peek()) & 0xff; if (a0 <= 240) return 1; - if (a0 >= 241 && a0 <= 248) return 2; + if (a0 <= 248) return 2; if (a0 == 249) return 3; if (a0 == 250) return 4; if (a0 == 251) return 5; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java index 279ce95efa..9fb0587302 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Thread Utility diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java index 87d56a9355..297571150d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/test/RedundantKVGenerator.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.io.WritableUtils; -import com.google.common.primitives.Bytes; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes; /** * Generate list of key values which are very useful to test data block encoding diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java index 47731f4f7d..8e17cceded 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Map.Entry; import java.util.Properties; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java index 0aa30eed6d..4202036ab4 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java @@ -273,7 +273,7 @@ public class ClassFinder { Class c = Class.forName(className, false, this.getClass().getClassLoader()); boolean isCandidateClass = null == classFilter || classFilter.isCandidateClass(c); return isCandidateClass ? c : null; - } catch (ClassNotFoundException classNotFoundEx) { + } catch (NoClassDefFoundError|ClassNotFoundException classNotFoundEx) { if (!proceedOnExceptions) { throw classNotFoundEx; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java index 244c26744f..fc30bbe1bc 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestClassFinder.java @@ -46,10 +46,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; +import org.junit.*; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java index 3cec72e1e2..b713ff6bf9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java @@ -37,7 +37,7 @@ import org.junit.AfterClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; @Category({MiscTests.class, SmallTests.class}) public class TestHBaseConfiguration { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java index f6f5a35451..2fcd92d97a 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodec.java @@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; @Category({MiscTests.class, SmallTests.class}) public class TestCellCodec { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java index cc707423a9..4035497eb3 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestCellCodecWithTags.java @@ -41,8 +41,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; @Category({MiscTests.class, SmallTests.class}) public class TestCellCodecWithTags { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java index e3366fe343..e21fb07be9 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java @@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; @Category({MiscTests.class, SmallTests.class}) public class TestKeyValueCodec { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java index 238d0a6c57..b69a135f74 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodecWithTags.java @@ -41,8 +41,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; @Category({MiscTests.class, SmallTests.class}) public class TestKeyValueCodecWithTags { diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml index fd16654fda..6647a7a894 100644 --- a/hbase-endpoint/pom.xml +++ b/hbase-endpoint/pom.xml @@ -1,6 +1,6 @@ - - 4.0.0 - - hbase - org.apache.hbase - 3.0.0-SNAPSHOT - .. - - - hbase-endpoint - Apache HBase - Coprocessor Endpoint - HBase Coprocessor Endpoint implementations - - - - true - - - - - - org.apache.maven.plugins - maven-site-plugin - - true - - - - - org.apache.maven.plugins - maven-source-plugin - + 4.0.0 + + hbase + org.apache.hbase + 3.0.0-SNAPSHOT + .. + + hbase-endpoint + Apache HBase - Coprocessor Endpoint + HBase Coprocessor Endpoint implementations + + + true + + + + + org.apache.maven.plugins + maven-site-plugin + + true + + + + + org.apache.maven.plugins + maven-source-plugin + maven-assembly-plugin @@ -58,50 +55,59 @@ true + + + + + + org.eclipse.m2e + lifecycle-mapping + 1.0.0 + + + + + + org.apache.hadoop + hadoop-maven-plugins + [2.0.5-alpha,) + + protoc + + + + + + + + + + - - - - - org.eclipse.m2e - lifecycle-mapping - 1.0.0 - - - - - - org.apache.hadoop - hadoop-maven-plugins - [2.0.5-alpha,) - - protoc - - - - - - - - - - - - - - - - - - org.apache.hbase - hbase-annotations - test-jar - test - - - org.apache.hbase - hbase-common - + + + + + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + + + org.apache.hbase + hbase-annotations + test-jar + test + + + org.apache.hbase + hbase-common + + + org.apache.hbase + hbase-common + test-jar + test + org.apache.hbase hbase-hadoop-compat @@ -124,88 +130,87 @@ test-jar test - - org.apache.hbase - hbase-protocol - - - org.apache.hbase - hbase-client - - - - org.apache.hbase - hbase-server - - - org.apache.hbase - hbase-server - test-jar - test - - - - commons-logging - commons-logging - - - - - - - skipRpcTests - - - skipRpcTests - - - - true - true - - - - compile-protobuf - - - compile-protobuf - - - - - - org.xolstice.maven.plugins - protobuf-maven-plugin - - - compile-protoc - generate-sources - - compile - - - - ${basedir}/../hbase-protocol/src/main/protobuf - - - - - - - - + + org.apache.hbase + hbase-server + + + org.apache.hbase + hbase-server + test-jar + test + + + + commons-logging + commons-logging + + + + + + skipRpcTests + + + skipRpcTests + + + + true + true + + + + compile-protobuf + + + compile-protobuf + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + compile-protoc + generate-sources + + compile + + + + ${basedir}/../hbase-protocol/src/main/protobuf + + + + + + + + - hadoop-2.0 - - !hadoop.profile + + + !hadoop.profile @@ -220,21 +225,45 @@ org.apache.hadoop hadoop-client + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs test-jar test + + + com.google.guava + guava + + org.apache.hadoop hadoop-minicluster test + + + com.google.guava + guava + + @@ -268,6 +297,12 @@ org.apache.hadoop hadoop-minicluster + + + com.google.guava + guava + + @@ -278,5 +313,5 @@ - + diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java index 6aaef809b4..936ab60ae0 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java @@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.List; diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java index dc895f6917..2c38662e68 100644 --- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java +++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java @@ -50,7 +50,7 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Tests bulk loading of HFiles with old secure Endpoint client for backward compatibility. Will be diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml index 0da0f74299..f4b53451b1 100644 --- a/hbase-examples/pom.xml +++ b/hbase-examples/pom.xml @@ -108,6 +108,10 @@ + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + + org.apache.hbase hbase-annotations test-jar @@ -217,6 +221,12 @@ if we can combine these profiles somehow --> org.apache.hadoop hadoop-mapreduce-client-core + + + com.google.guava + guava + + org.apache.hadoop diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java index 5b86ad3bea..c940c2a6db 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/HttpProxyExample.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.client.example; -import com.google.common.base.Preconditions; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import io.netty.bootstrap.ServerBootstrap; import io.netty.buffer.ByteBuf; diff --git a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java index 90fa2c700a..aebc66678b 100644 --- a/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java +++ b/hbase-examples/src/main/java/org/apache/hadoop/hbase/client/example/MultiThreadedClientExample.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.client.example; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/client/example/TestHttpProxyExample.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/client/example/TestHttpProxyExample.java index 7c44f55d70..fa9e542361 100644 --- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/client/example/TestHttpProxyExample.java +++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/client/example/TestHttpProxyExample.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client.example; import static org.junit.Assert.assertEquals; -import com.google.common.io.ByteStreams; +import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams; import java.nio.charset.StandardCharsets; diff --git a/hbase-external-blockcache/pom.xml b/hbase-external-blockcache/pom.xml index 12083fed9b..c5667b3eb4 100644 --- a/hbase-external-blockcache/pom.xml +++ b/hbase-external-blockcache/pom.xml @@ -227,6 +227,12 @@ org.apache.hadoop hadoop-common + + + com.google.guava + guava + + @@ -273,6 +279,12 @@ org.apache.hadoop hadoop-common + + + com.google.guava + guava + + diff --git a/hbase-hadoop-compat/pom.xml b/hbase-hadoop-compat/pom.xml index 121998da65..a350af1d0a 100644 --- a/hbase-hadoop-compat/pom.xml +++ b/hbase-hadoop-compat/pom.xml @@ -97,6 +97,10 @@ test-jar test + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + commons-logging diff --git a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/OperationMetrics.java b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/OperationMetrics.java index b34d3e335c..d7a9ecd2b8 100644 --- a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/OperationMetrics.java +++ b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/metrics/OperationMetrics.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.metrics; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Container class for commonly collected metrics for most operations. Instantiate this class to diff --git a/hbase-hadoop2-compat/pom.xml b/hbase-hadoop2-compat/pom.xml index a905966213..2b7947c1ce 100644 --- a/hbase-hadoop2-compat/pom.xml +++ b/hbase-hadoop2-compat/pom.xml @@ -154,6 +154,10 @@ limitations under the License. org.apache.hbase + hbase-metrics-api + + + org.apache.hbase hbase-hadoop-compat ${project.version} test-jar @@ -167,6 +171,12 @@ limitations under the License. org.apache.hadoop hadoop-mapreduce-client-core ${hadoop-two.version} + + + com.google.guava + guava + + org.apache.hadoop @@ -182,8 +192,8 @@ limitations under the License. commons-logging - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java index 565b853e75..1dec2093e7 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/Interns.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.metrics; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.java index 73dc459a74..ea10f6865d 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/MetricsInfoImpl.java @@ -18,8 +18,9 @@ package org.apache.hadoop.hbase.metrics; -import com.google.common.base.Objects; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsInfo; @@ -58,7 +59,7 @@ class MetricsInfoImpl implements MetricsInfo { } @Override public String toString() { - return Objects.toStringHelper(this) + return MoreObjects.toStringHelper(this) .add("name", name).add("description", description) .toString(); } diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java index ddcf56d134..7db26a512b 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/metrics/impl/GlobalMetricRegistriesAdapter.java @@ -39,7 +39,7 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper; import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This class acts as an adapter to export the MetricRegistry's in the global registry. Each diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java index 1ae9bd42b2..d813a55637 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java @@ -30,7 +30,7 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * JMX caches the beans that have been exported; even after the values are removed from hadoop's diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java index 977536ae87..26a9f65205 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/lib/DynamicMetricsRegistry.java @@ -31,8 +31,8 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsTag; import org.apache.hadoop.metrics2.impl.MsInfo; -import com.google.common.base.Objects; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; /** * An optional metrics registry class for creating and maintaining a @@ -394,7 +394,7 @@ public class DynamicMetricsRegistry { } @Override public String toString() { - return Objects.toStringHelper(this) + return MoreObjects.toStringHelper(this) .add("info", metricsInfo).add("tags", tags()).add("metrics", metrics()) .toString(); } diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java index 3e4016d4a8..16093b5262 100644 --- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java +++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/util/MetricSampleQuantiles.java @@ -27,7 +27,7 @@ import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Implementation of the Cormode, Korn, Muthukrishnan, and Srivastava algorithm diff --git a/hbase-it/pom.xml b/hbase-it/pom.xml index a867fbbc09..91296dafd8 100644 --- a/hbase-it/pom.xml +++ b/hbase-it/pom.xml @@ -217,6 +217,12 @@ org.apache.hbase hbase-hadoop-compat + + + com.google.guava + guava + + org.apache.hbase @@ -228,8 +234,8 @@ hbase-testing-util - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous io.dropwizard.metrics @@ -297,12 +303,24 @@ org.apache.hadoop hadoop-mapreduce-client-core + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-jobclient test-jar test + + + com.google.guava + guava + + org.apache.hadoop diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java index ca8e277b67..e1c17a4daa 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestAcidGuarantees.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.regionserver.CompactingMemStore; import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java index 8596489652..f041f72f29 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBackupRestore.java @@ -46,14 +46,15 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.testclassification.IntegrationTests; import org.apache.hadoop.util.ToolRunner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.base.Objects; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * An integration test to detect regressions in HBASE-7912. Create @@ -297,7 +298,8 @@ public class IntegrationTestBackupRestore extends IntegrationTestBase { rowsInBatch = Integer.parseInt(cmd.getOptionValue(NB_ROWS_IN_BATCH_KEY, Integer.toString(DEFAULT_NB_ROWS_IN_BATCH))); - LOG.info(Objects.toStringHelper("Parsed Options").add(REGION_COUNT_KEY, regionsCountPerServer) + LOG.info(MoreObjects.toStringHelper("Parsed Options"). + add(REGION_COUNT_KEY, regionsCountPerServer) .add(REGIONSERVER_COUNT_KEY, regionServerCount).add(NB_ROWS_IN_BATCH_KEY, rowsInBatch) .toString()); } diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java index 7b6635ed4b..76be4e8a56 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestIngest.java @@ -36,7 +36,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * A base class for tests that do something with the cluster while running diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java index c3c5df3658..d649bdb7e9 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaPerf.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hbase; -import com.google.common.base.Objects; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import com.codahale.metrics.Histogram; import org.apache.commons.cli.CommandLine; import org.apache.commons.logging.Log; @@ -149,7 +149,7 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase { @Override public String toString() { - return Objects.toStringHelper(this) + return MoreObjects.toStringHelper(this) .add("numRows", numRows) .add("elapsedTime", elapsedTime) .toString(); @@ -214,7 +214,7 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase { primaryTimeout = Integer.parseInt(cmd.getOptionValue(PRIMARY_TIMEOUT_KEY, PRIMARY_TIMEOUT_DEFAULT)); clusterSize = Integer.parseInt(cmd.getOptionValue(NUM_RS_KEY, NUM_RS_DEFAULT)); - LOG.debug(Objects.toStringHelper("Parsed Options") + LOG.debug(MoreObjects.toStringHelper("Parsed Options") .add(TABLE_NAME_KEY, tableName) .add(SLEEP_TIME_KEY, sleepTime) .add(REPLICA_COUNT_KEY, replicaCount) @@ -313,7 +313,7 @@ public class IntegrationTestRegionReplicaPerf extends IntegrationTestBase { double withReplicas9999Mean = calcMean("withReplicas", Stat.FOUR_9S, resultsWithReplicas); - LOG.info(Objects.toStringHelper(this) + LOG.info(MoreObjects.toStringHelper(this) .add("withoutReplicas", resultsWithoutReplicas) .add("withReplicas", resultsWithReplicas) .add("withoutReplicasStdevMean", withoutReplicasStdevMean) diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java index b6cfdcda53..0e773bb98d 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java @@ -38,7 +38,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Integration test for testing async wal replication to secondary region replicas. Sets up a table diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java index 2a69c8ce42..c53542c5cc 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.IntegrationTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import org.apache.hadoop.hbase.util.ReflectionUtils; /** diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java index 1ee7d936c5..82c51adac4 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/util/ChaosMonkeyRunner.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey; import org.apache.hadoop.hbase.util.AbstractHBaseTool; import org.apache.hadoop.util.ToolRunner; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; public class ChaosMonkeyRunner extends AbstractHBaseTool { private static final Log LOG = LogFactory.getLog(ChaosMonkeyRunner.class); diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java index df83731746..f955610747 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/ipc/IntegrationTestRpcClient.java @@ -52,7 +52,7 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(IntegrationTests.class) public class IntegrationTestRpcClient { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java index e39d0fe03f..52f1223e59 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java @@ -20,9 +20,9 @@ package org.apache.hadoop.hbase.mapreduce; import static org.junit.Assert.assertEquals; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.commons.cli.CommandLine; import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.logging.Log; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java index bd14c31aaf..2aebc74a97 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java @@ -63,6 +63,7 @@ import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException; import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.LoadTestTool; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import org.apache.htrace.Span; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; @@ -72,7 +73,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; /** * Integration test that should benchmark how fast HBase can recover from failures. This test starts @@ -339,7 +340,7 @@ public class IntegrationTestMTTR { long runtimeMs = TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS); - Objects.ToStringHelper helper = Objects.toStringHelper("MTTRResults") + MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper("MTTRResults") .add("putResults", resultPuts) .add("scanResults", resultScan) .add("adminResults", resultAdmin) @@ -373,7 +374,7 @@ public class IntegrationTestMTTR { @Override public String toString() { - Objects.ToStringHelper helper = Objects.toStringHelper(this) + MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this) .add("numResults", stats.getN()) .add("minTime", stats.getMin()) .add("meanTime", stats.getMean()) diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java index 1b23de8799..2fdfab629e 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java @@ -118,7 +118,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * This is an integration test borrowed from goraci, written by Keith Turner, diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java index 9eacc5a7fd..f0425213ad 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java @@ -85,7 +85,7 @@ import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * A large test which loads a lot of data that has internal references, and diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java index bf534f3f9a..34af01b30b 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.test; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.commons.cli.CommandLine; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java index b03a5864cc..5a654ba0c1 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedMultiGetRequestsWithRegionReplicas.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.util.LoadTestTool; import org.apache.hadoop.util.ToolRunner; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Extends {@link IntegrationTestTimeBoundedRequestsWithRegionReplicas} for multi-gets diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java index e92f863afb..5bd2c87330 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestTimeBoundedRequestsWithRegionReplicas.java @@ -54,7 +54,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Assert; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * An IntegrationTest for doing reads with a timeout, to a read-only table with region diff --git a/hbase-metrics-api/pom.xml b/hbase-metrics-api/pom.xml index 9add123d46..65a2d8ed46 100644 --- a/hbase-metrics-api/pom.xml +++ b/hbase-metrics-api/pom.xml @@ -93,6 +93,14 @@ commons-logging commons-logging + + commons-lang + commons-lang + + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + diff --git a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java index 4fef10c99a..c9418905ad 100644 --- a/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java +++ b/hbase-metrics-api/src/main/java/org/apache/hadoop/hbase/metrics/MetricRegistriesLoader.java @@ -30,7 +30,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.ReflectionUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; @InterfaceAudience.Private public class MetricRegistriesLoader { diff --git a/hbase-metrics-api/src/test/java/org/apache/hadoop/hbase/metrics/TestMetricRegistriesLoader.java b/hbase-metrics-api/src/test/java/org/apache/hadoop/hbase/metrics/TestMetricRegistriesLoader.java index 8746146ded..5bbb52b4ba 100644 --- a/hbase-metrics-api/src/test/java/org/apache/hadoop/hbase/metrics/TestMetricRegistriesLoader.java +++ b/hbase-metrics-api/src/test/java/org/apache/hadoop/hbase/metrics/TestMetricRegistriesLoader.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Test class for {@link MetricRegistriesLoader}. @@ -53,4 +53,4 @@ public class TestMetricRegistriesLoader { assertNotEquals(loader2, instance); assertNotEquals(loader3, instance); } -} \ No newline at end of file +} diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml index fe40346414..4ce000f825 100644 --- a/hbase-metrics/pom.xml +++ b/hbase-metrics/pom.xml @@ -73,6 +73,10 @@ + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + org.apache.hbase diff --git a/hbase-metrics/src/test/java/org/apache/hadoop/hbase/metrics/impl/TestRefCountingMap.java b/hbase-metrics/src/test/java/org/apache/hadoop/hbase/metrics/impl/TestRefCountingMap.java index cf31d50263..d4c249c215 100644 --- a/hbase-metrics/src/test/java/org/apache/hadoop/hbase/metrics/impl/TestRefCountingMap.java +++ b/hbase-metrics/src/test/java/org/apache/hadoop/hbase/metrics/impl/TestRefCountingMap.java @@ -33,7 +33,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(SmallTests.class) public class TestRefCountingMap { diff --git a/hbase-prefix-tree/pom.xml b/hbase-prefix-tree/pom.xml index 4c6dc7bfe1..0cf59a202e 100644 --- a/hbase-prefix-tree/pom.xml +++ b/hbase-prefix-tree/pom.xml @@ -97,6 +97,10 @@ org.apache.hbase hbase-annotations + + + org.apache.hbase + hbase-annotations test-jar test @@ -110,8 +114,8 @@ ${project.version} - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous commons-logging @@ -154,6 +158,12 @@ org.apache.hadoop hadoop-common + + + com.google.guava + guava + + @@ -176,6 +186,12 @@ org.apache.hadoop hadoop-common + + + com.google.guava + guava + + diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java index bfed9956a7..7df6f5fe00 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/decode/PrefixTreeArraySearcher.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellScannerPosition; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; -import com.google.common.primitives.UnsignedBytes; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.UnsignedBytes; /** *

diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java index b30daf6917..85ab694cc1 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/column/ColumnSectionWriter.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode; import org.apache.hadoop.hbase.util.CollectionUtils; import org.apache.hadoop.hbase.util.vint.UFIntTool; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** *

diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java index 3597fbef34..98f8d0d1ed 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/other/LongEncoder.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.ArrayUtils; import org.apache.hadoop.hbase.util.CollectionUtils; import org.apache.hadoop.hbase.util.vint.UFIntTool; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; /** * Used to de-duplicate, sort, minimize/diff, and serialize timestamps and mvccVersions from a diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java index e61d45de59..0e23167105 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.encode.PrefixTreeEncoder; import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode; import org.apache.hadoop.hbase.util.vint.UFIntTool; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Most of the complexity of the PrefixTree is contained in the "row section". It contains the row diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java index e2824b0f81..36576d6125 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/Tokenizer.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CollectionUtils; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Data structure used in the first stage of PrefixTree encoding: diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java index 25bee1f8bd..3df69ca0a7 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.CollectionUtils; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.util.Strings; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Individual node in a Trie structure. Each node is one of 3 types: diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/ByteRangeSet.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/ByteRangeSet.java index 0ec9107652..7bd3b9ca21 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/ByteRangeSet.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/byterange/ByteRangeSet.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Performance oriented class for de-duping and storing arbitrary byte[]'s arriving in non-sorted diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java index 2ec51efd80..49bc81e4c6 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java @@ -24,7 +24,7 @@ import java.util.List; import org.apache.hadoop.hbase.codec.prefixtree.builder.data.TestTokenizerDataBasic; import org.apache.hadoop.hbase.codec.prefixtree.builder.data.TestTokenizerDataEdgeCase; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public interface TestTokenizerData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTreeDepth.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTreeDepth.java index 87fcf073a7..83d6e80114 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTreeDepth.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTreeDepth.java @@ -30,7 +30,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MiscTests.class, SmallTests.class}) public class TestTreeDepth { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataBasic.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataBasic.java index f925115cee..1df09f3a26 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataBasic.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataBasic.java @@ -23,7 +23,7 @@ import java.util.List; import org.apache.hadoop.hbase.codec.prefixtree.builder.TestTokenizerData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestTokenizerDataBasic implements TestTokenizerData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataEdgeCase.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataEdgeCase.java index 87457d0f83..3e25d835bf 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataEdgeCase.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/data/TestTokenizerDataEdgeCase.java @@ -23,7 +23,7 @@ import java.util.List; import org.apache.hadoop.hbase.codec.prefixtree.builder.TestTokenizerData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestTokenizerDataEdgeCase implements TestTokenizerData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java index 49347670d3..19a6311f54 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnBuilder.java @@ -44,7 +44,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MiscTests.class, SmallTests.class}) @RunWith(Parameterized.class) diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java index 3b206f285d..d9315ab138 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.column.data.TestColumnDataRandom import org.apache.hadoop.hbase.codec.prefixtree.column.data.TestColumnDataSimple; import org.apache.hadoop.hbase.util.ByteRange; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public interface TestColumnData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java index 579c5a0fbd..77036be2fa 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataRandom.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.byterange.ByteRangeSet; import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet; import org.apache.hadoop.hbase.util.test.RedundantKVGenerator; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestColumnDataRandom implements TestColumnData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataSimple.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataSimple.java index bdb77ea3e0..721240af6f 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataSimple.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/data/TestColumnDataSimple.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.util.ByteRange; import org.apache.hadoop.hbase.util.ByteRangeUtils; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestColumnDataSimple implements TestColumnData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java index 89ceacff28..95df90fc3f 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/BaseTestRowData.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public abstract class BaseTestRowData implements TestRowData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java index 4bf60e03df..8d187120cc 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataUrls; import org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataUrlsExample; import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /* * A master class for registering different implementations of TestRowData. diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java index 765d0396f6..22119bef92 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowEncoder.java @@ -45,7 +45,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({MiscTests.class, SmallTests.class}) @RunWith(Parameterized.class) diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java index 66fe3f33fe..bee4eac99a 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataComplexQualifiers.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataComplexQualifiers extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java index 4d05742165..3db52cca45 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDeeper.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /* * Goes beyond a trivial trie to add a branch on the "cf" node diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java index 8639e8fb3d..b7793ce6ba 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataDifferentTimestamps.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /* * test different timestamps diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataEmpty.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataEmpty.java index 3bb23fa5c9..f36c7c74f7 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataEmpty.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataEmpty.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataEmpty extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java index 1f9b45917a..16ef39cd5c 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataExerciseFInts.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /* * test different timestamps diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataMultiFamilies.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataMultiFamilies.java index ab848d696a..90a580b225 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataMultiFamilies.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataMultiFamilies.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataMultiFamilies extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java index a818f5e413..c47cae522b 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNub.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataNub extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java index 53931369d8..255376a156 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataNumberStrings.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataNumberStrings extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java index 6d3918ff45..6d45efaf18 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataQualifierByteOrdering.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataQualifierByteOrdering extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java index 5834d1b28e..9ee9be8a7d 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValues.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.test.RedundantKVGenerator; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataRandomKeyValues extends BaseTestRowData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java index 9fac9db29a..86bdf0274f 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataRandomKeyValuesWithTags.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.test.RedundantKVGenerator; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Generated KVs with tags */ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearchWithPrefix.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearchWithPrefix.java index 385dc42a02..0c4bd5bd6a 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearchWithPrefix.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearchWithPrefix.java @@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataSearchWithPrefix extends BaseTestRowData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java index 916d7f50d4..2fbd4b444d 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSearcherRowMiss.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataSearcherRowMiss extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java index 62234fcc0f..edb6dde7fd 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSimple.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CollectionUtils; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataSimple extends BaseTestRowData { diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java index 35ba2c137a..294dc3f1c8 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataSingleQualifier.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeTestConstants; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataSingleQualifier extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java index 2a53976cb7..56c1f6a4d7 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivial.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataTrivial extends BaseTestRowData{ diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java index a7edfe7914..669f19a2e1 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataTrivialWithTags.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.scanner.CellSearcher; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public class TestRowDataTrivialWithTags extends BaseTestRowData{ static byte[] rA = Bytes.toBytes("rA"), rB = Bytes.toBytes("rB"),// turn "r" diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java index 02766179c3..3ac7877782 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrls.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.util.byterange.impl.ByteRangeTreeSet; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /* * test different timestamps diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java index 56d4e85445..a0c2ee1185 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/data/TestRowDataUrlsExample.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode; import org.apache.hadoop.hbase.codec.prefixtree.row.BaseTestRowData; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /* * test different timestamps diff --git a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java index c3618ffbb3..00704f40c2 100644 --- a/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java +++ b/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampData import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataNumbers; import org.apache.hadoop.hbase.codec.prefixtree.timestamp.data.TestTimestampDataRepeats; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; public interface TestTimestampData { diff --git a/hbase-procedure/pom.xml b/hbase-procedure/pom.xml index 089124a406..fb706ba60e 100644 --- a/hbase-procedure/pom.xml +++ b/hbase-procedure/pom.xml @@ -79,8 +79,8 @@ hbase-common - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous commons-logging diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java index 2c26ab9012..8b9dc48d9a 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.NonceKey; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Base Procedure class responsible for Procedure Metadata; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java index 946d4504ad..bc80445e28 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.procedure2; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java index 233ef57e49..a5a126d556 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureScheduler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.procedure2; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.List; import java.util.concurrent.TimeUnit; diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java index 7ce75685c0..3232f2b3ba 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureUtil.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos; import org.apache.hadoop.hbase.util.ForeignExceptionUtil; import org.apache.hadoop.hbase.util.NonceKey; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Helper to convert to/from ProcedureProtos diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java index 8d5ff3c5bd..cc9d9398d5 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.procedure2.util.StringUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; -import com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; /** * A procedure dispatcher that aggregates and sends after elapsed time or after we hit diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java index f3523ec0ac..69c59c8139 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java @@ -23,7 +23,7 @@ import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Simple scheduler for procedures diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java index 1791caeb7f..ed191bb0f8 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.java @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedu import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.ipc.RemoteException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * WAL implementation of the ProcedureStore. diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java index 54cd60b6f1..7f98b80abc 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureUtil.java @@ -22,6 +22,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ProcedureInfo; import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.util.JsonFormat; import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -71,4 +72,11 @@ public class TestProcedureUtil { public static class TestProcedureNoDefaultConstructor extends TestProcedure { public TestProcedureNoDefaultConstructor(int x) {} } + + public static void main(final String [] args) throws Exception { + final TestProcedure proc1 = new TestProcedure(10); + final ProcedureProtos.Procedure proto1 = ProcedureUtil.convertToProtoProcedure(proc1); + JsonFormat.Printer printer = JsonFormat.printer().omittingInsignificantWhitespace(); + System.out.println(printer.print(proto1)); + } } diff --git a/hbase-protocol-shaded/pom.xml b/hbase-protocol-shaded/pom.xml index 48c6430e81..ed58ecea13 100644 --- a/hbase-protocol-shaded/pom.xml +++ b/hbase-protocol-shaded/pom.xml @@ -33,7 +33,7 @@ true - 3.2.0 + 3.3.0 ${project.build.directory}/classes diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java index 4a6fefa4ab..61490d49c2 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueD import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLite; - import java.io.IOException; import java.io.InputStream; import java.util.Arrays; @@ -328,8 +327,12 @@ public abstract class AbstractMessage extends AbstractMessageLite.Builder implements Message.Builder { // The compiler produces an error if this is not declared explicitly. + // Method isn't abstract to bypass Java 1.6 compiler issue: + // http://bugs.java.com/view_bug.do?bug_id=6908259 @Override - public abstract BuilderType clone(); + public BuilderType clone() { + throw new UnsupportedOperationException("clone() should be implemented in subclasses."); + } /** TODO(jieluo): Clear it when all subclasses have implemented this method. */ @Override diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java index e5b87f072c..ca2e3282f6 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessageLite.java @@ -30,6 +30,8 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; + import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; @@ -351,22 +353,23 @@ public abstract class AbstractMessageLite< */ protected static void addAll(final Iterable values, final Collection list) { - if (values == null) { - throw new NullPointerException(); - } + checkNotNull(values); if (values instanceof LazyStringList) { // For StringOrByteStringLists, check the underlying elements to avoid // forcing conversions of ByteStrings to Strings. + // TODO(dweis): Could we just prohibit nulls in all protobuf lists and get rid of this? Is + // if even possible to hit this condition as all protobuf methods check for null first, + // right? checkForNullValues(((LazyStringList) values).getUnderlyingElements()); list.addAll((Collection) values); } else if (values instanceof Collection) { - checkForNullValues(values); + if (!(values instanceof PrimitiveNonBoxingCollection)) { + checkForNullValues(values); + } list.addAll((Collection) values); } else { for (final T value : values) { - if (value == null) { - throw new NullPointerException(); - } + checkNotNull(value); list.add(value); } } @@ -374,9 +377,7 @@ public abstract class AbstractMessageLite< private static void checkForNullValues(final Iterable values) { for (final Object value : values) { - if (value == null) { - throw new NullPointerException(); - } + checkNotNull(value); } } } diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java index 5fe6a22d24..25f6bb8d32 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java @@ -31,9 +31,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; import org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.LimitedInputStream; - import java.io.IOException; import java.io.InputStream; +import java.nio.ByteBuffer; /** * A partial implementation of the {@link Parser} interface which implements @@ -131,6 +131,30 @@ public abstract class AbstractParser } @Override + public MessageType parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry) + throws InvalidProtocolBufferException { + MessageType message; + try { + CodedInputStream input = CodedInputStream.newInstance(data); + message = parsePartialFrom(input, extensionRegistry); + try { + input.checkLastTagWas(0); + } catch (InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(message); + } + } catch (InvalidProtocolBufferException e) { + throw e; + } + + return checkMessageInitialized(message); + } + + @Override + public MessageType parseFrom(ByteBuffer data) throws InvalidProtocolBufferException { + return parseFrom(data, EMPTY_REGISTRY); + } + + @Override public MessageType parsePartialFrom( byte[] data, int off, int len, ExtensionRegistryLite extensionRegistry) throws InvalidProtocolBufferException { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java index bf8e852655..e23674ffa6 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Any.java @@ -184,6 +184,7 @@ public final class Any extends private volatile org.apache.hadoop.hbase.shaded.com.google.protobuf.Message cachedUnpackValue; + @java.lang.SuppressWarnings("unchecked") public T unpack( java.lang.Class clazz) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { @@ -362,6 +363,17 @@ public final class Any extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java index 3265158e22..9d2b445544 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Api.java @@ -624,6 +624,17 @@ public final class Api extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Api parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java index a2dbe4a946..89bb349343 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BoolValue.java @@ -152,6 +152,17 @@ public final class BoolValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BoolValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java index 6bc9641747..cd0a19d65c 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BooleanArrayList.java @@ -30,8 +30,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.BooleanList; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.BooleanList; import java.util.Arrays; import java.util.Collection; import java.util.RandomAccess; @@ -41,9 +42,8 @@ import java.util.RandomAccess; * * @author dweis@google.com (Daniel Weis) */ -final class BooleanArrayList - extends AbstractProtobufList - implements BooleanList, RandomAccess { +final class BooleanArrayList extends AbstractProtobufList + implements BooleanList, RandomAccess, PrimitiveNonBoxingCollection { private static final BooleanArrayList EMPTY_LIST = new BooleanArrayList(); static { @@ -198,9 +198,7 @@ final class BooleanArrayList public boolean addAll(Collection collection) { ensureIsMutable(); - if (collection == null) { - throw new NullPointerException(); - } + checkNotNull(collection); // We specialize when adding another BooleanArrayList to avoid boxing elements. if (!(collection instanceof BooleanArrayList)) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java index ecfbcc16cb..192a3d1d6b 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/BytesValue.java @@ -151,6 +151,17 @@ public final class BytesValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.BytesValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java index 23cc1a4d88..951e479d1f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedInputStream.java @@ -359,9 +359,9 @@ public abstract class CodedInputStream { * *

Set the maximum message size. In order to prevent malicious messages from exhausting memory * or causing integer overflows, {@code CodedInputStream} limits how large a message may be. The - * default limit is 64MB. You should set this limit as small as you can without harming your app's - * functionality. Note that size limits only apply when reading from an {@code InputStream}, not - * when constructed around a raw byte array (nor with {@link ByteString#newCodedInput}). + * default limit is {@code Integer.MAX_INT}. You should set this limit as small as you can without + * harming your app's functionality. Note that size limits only apply when reading from an {@code + * InputStream}, not when constructed around a raw byte array. * *

If you want to read several messages from a single CodedInputStream, you could call {@link * #resetSizeCounter()} after each one to avoid hitting the size limit. diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java index 03871c9388..b26f9393d0 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/CodedOutputStream.java @@ -184,7 +184,7 @@ public abstract class CodedOutputStream extends ByteOutput { * maps are sorted on the lexicographical order of the UTF8 encoded keys. * */ - void useDeterministicSerialization() { + public void useDeterministicSerialization() { serializationDeterministic = true; } @@ -1854,7 +1854,7 @@ public abstract class CodedOutputStream extends ByteOutput { } static boolean isSupported() { - return UnsafeUtil.hasUnsafeByteBufferOperations(); + return UnsafeUtil.hasUnsafeByteBufferOperations() && UnsafeUtil.hasUnsafeCopyMemory(); } @Override diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java index 0468e6c5a9..35f7f5fe31 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DescriptorProtos.java @@ -234,6 +234,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorSet parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2117,6 +2128,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5298,6 +5320,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ExtensionRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5889,6 +5922,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto.ReservedRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6850,6 +6894,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.DescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10899,6 +10954,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12389,6 +12455,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13242,6 +13319,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14337,6 +14425,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15231,6 +15330,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16592,6 +16702,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodDescriptorProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17777,6 +17898,35 @@ public final class DescriptorProtos { /** *

+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     * 
+ * + * optional string php_class_prefix = 40; + */ + boolean hasPhpClassPrefix(); + /** + *
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     * 
+ * + * optional string php_class_prefix = 40; + */ + java.lang.String getPhpClassPrefix(); + /** + *
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     * 
+ * + * optional string php_class_prefix = 40; + */ + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getPhpClassPrefixBytes(); + + /** + *
      * The parser stores options it doesn't recognize here. See above.
      * 
* @@ -17847,6 +17997,7 @@ public final class DescriptorProtos { objcClassPrefix_ = ""; csharpNamespace_ = ""; swiftPrefix_ = ""; + phpClassPrefix_ = ""; uninterpretedOption_ = java.util.Collections.emptyList(); } @@ -17965,10 +18116,16 @@ public final class DescriptorProtos { swiftPrefix_ = bs; break; } + case 322: { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00008000; + phpClassPrefix_ = bs; + break; + } case 7994: { - if (!((mutable_bitField0_ & 0x00008000) == 0x00008000)) { + if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) { uninterpretedOption_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00008000; + mutable_bitField0_ |= 0x00010000; } uninterpretedOption_.add( input.readMessage(org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.PARSER, extensionRegistry)); @@ -17982,7 +18139,7 @@ public final class DescriptorProtos { throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00008000) == 0x00008000)) { + if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); } this.unknownFields = unknownFields.build(); @@ -18726,6 +18883,63 @@ public final class DescriptorProtos { } } + public static final int PHP_CLASS_PREFIX_FIELD_NUMBER = 40; + private volatile java.lang.Object phpClassPrefix_; + /** + *
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     * 
+ * + * optional string php_class_prefix = 40; + */ + public boolean hasPhpClassPrefix() { + return ((bitField0_ & 0x00008000) == 0x00008000); + } + /** + *
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     * 
+ * + * optional string php_class_prefix = 40; + */ + public java.lang.String getPhpClassPrefix() { + java.lang.Object ref = phpClassPrefix_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + phpClassPrefix_ = s; + } + return s; + } + } + /** + *
+     * Sets the php class prefix which is prepended to all php generated classes
+     * from this .proto. Default is empty.
+     * 
+ * + * optional string php_class_prefix = 40; + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getPhpClassPrefixBytes() { + java.lang.Object ref = phpClassPrefix_; + if (ref instanceof java.lang.String) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + phpClassPrefix_ = b; + return b; + } else { + return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + } + } + public static final int UNINTERPRETED_OPTION_FIELD_NUMBER = 999; private java.util.List uninterpretedOption_; /** @@ -18851,6 +19065,9 @@ public final class DescriptorProtos { if (((bitField0_ & 0x00004000) == 0x00004000)) { org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 39, swiftPrefix_); } + if (((bitField0_ & 0x00008000) == 0x00008000)) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 40, phpClassPrefix_); + } for (int i = 0; i < uninterpretedOption_.size(); i++) { output.writeMessage(999, uninterpretedOption_.get(i)); } @@ -18917,6 +19134,9 @@ public final class DescriptorProtos { if (((bitField0_ & 0x00004000) == 0x00004000)) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(39, swiftPrefix_); } + if (((bitField0_ & 0x00008000) == 0x00008000)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(40, phpClassPrefix_); + } for (int i = 0; i < uninterpretedOption_.size(); i++) { size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream .computeMessageSize(999, uninterpretedOption_.get(i)); @@ -19013,6 +19233,11 @@ public final class DescriptorProtos { result = result && getSwiftPrefix() .equals(other.getSwiftPrefix()); } + result = result && (hasPhpClassPrefix() == other.hasPhpClassPrefix()); + if (hasPhpClassPrefix()) { + result = result && getPhpClassPrefix() + .equals(other.getPhpClassPrefix()); + } result = result && getUninterpretedOptionList() .equals(other.getUninterpretedOptionList()); result = result && unknownFields.equals(other.unknownFields); @@ -19096,6 +19321,10 @@ public final class DescriptorProtos { hash = (37 * hash) + SWIFT_PREFIX_FIELD_NUMBER; hash = (53 * hash) + getSwiftPrefix().hashCode(); } + if (hasPhpClassPrefix()) { + hash = (37 * hash) + PHP_CLASS_PREFIX_FIELD_NUMBER; + hash = (53 * hash) + getPhpClassPrefix().hashCode(); + } if (getUninterpretedOptionCount() > 0) { hash = (37 * hash) + UNINTERPRETED_OPTION_FIELD_NUMBER; hash = (53 * hash) + getUninterpretedOptionList().hashCode(); @@ -19107,6 +19336,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FileOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -19251,9 +19491,11 @@ public final class DescriptorProtos { bitField0_ = (bitField0_ & ~0x00002000); swiftPrefix_ = ""; bitField0_ = (bitField0_ & ~0x00004000); + phpClassPrefix_ = ""; + bitField0_ = (bitField0_ & ~0x00008000); if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00008000); + bitField0_ = (bitField0_ & ~0x00010000); } else { uninterpretedOptionBuilder_.clear(); } @@ -19341,10 +19583,14 @@ public final class DescriptorProtos { to_bitField0_ |= 0x00004000; } result.swiftPrefix_ = swiftPrefix_; + if (((from_bitField0_ & 0x00008000) == 0x00008000)) { + to_bitField0_ |= 0x00008000; + } + result.phpClassPrefix_ = phpClassPrefix_; if (uninterpretedOptionBuilder_ == null) { - if (((bitField0_ & 0x00008000) == 0x00008000)) { + if (((bitField0_ & 0x00010000) == 0x00010000)) { uninterpretedOption_ = java.util.Collections.unmodifiableList(uninterpretedOption_); - bitField0_ = (bitField0_ & ~0x00008000); + bitField0_ = (bitField0_ & ~0x00010000); } result.uninterpretedOption_ = uninterpretedOption_; } else { @@ -19472,11 +19718,16 @@ public final class DescriptorProtos { swiftPrefix_ = other.swiftPrefix_; onChanged(); } + if (other.hasPhpClassPrefix()) { + bitField0_ |= 0x00008000; + phpClassPrefix_ = other.phpClassPrefix_; + onChanged(); + } if (uninterpretedOptionBuilder_ == null) { if (!other.uninterpretedOption_.isEmpty()) { if (uninterpretedOption_.isEmpty()) { uninterpretedOption_ = other.uninterpretedOption_; - bitField0_ = (bitField0_ & ~0x00008000); + bitField0_ = (bitField0_ & ~0x00010000); } else { ensureUninterpretedOptionIsMutable(); uninterpretedOption_.addAll(other.uninterpretedOption_); @@ -19489,7 +19740,7 @@ public final class DescriptorProtos { uninterpretedOptionBuilder_.dispose(); uninterpretedOptionBuilder_ = null; uninterpretedOption_ = other.uninterpretedOption_; - bitField0_ = (bitField0_ & ~0x00008000); + bitField0_ = (bitField0_ & ~0x00010000); uninterpretedOptionBuilder_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getUninterpretedOptionFieldBuilder() : null; @@ -20701,12 +20952,118 @@ public final class DescriptorProtos { return this; } + private java.lang.Object phpClassPrefix_ = ""; + /** + *
+       * Sets the php class prefix which is prepended to all php generated classes
+       * from this .proto. Default is empty.
+       * 
+ * + * optional string php_class_prefix = 40; + */ + public boolean hasPhpClassPrefix() { + return ((bitField0_ & 0x00008000) == 0x00008000); + } + /** + *
+       * Sets the php class prefix which is prepended to all php generated classes
+       * from this .proto. Default is empty.
+       * 
+ * + * optional string php_class_prefix = 40; + */ + public java.lang.String getPhpClassPrefix() { + java.lang.Object ref = phpClassPrefix_; + if (!(ref instanceof java.lang.String)) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + phpClassPrefix_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Sets the php class prefix which is prepended to all php generated classes
+       * from this .proto. Default is empty.
+       * 
+ * + * optional string php_class_prefix = 40; + */ + public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getPhpClassPrefixBytes() { + java.lang.Object ref = phpClassPrefix_; + if (ref instanceof String) { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + phpClassPrefix_ = b; + return b; + } else { + return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Sets the php class prefix which is prepended to all php generated classes
+       * from this .proto. Default is empty.
+       * 
+ * + * optional string php_class_prefix = 40; + */ + public Builder setPhpClassPrefix( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00008000; + phpClassPrefix_ = value; + onChanged(); + return this; + } + /** + *
+       * Sets the php class prefix which is prepended to all php generated classes
+       * from this .proto. Default is empty.
+       * 
+ * + * optional string php_class_prefix = 40; + */ + public Builder clearPhpClassPrefix() { + bitField0_ = (bitField0_ & ~0x00008000); + phpClassPrefix_ = getDefaultInstance().getPhpClassPrefix(); + onChanged(); + return this; + } + /** + *
+       * Sets the php class prefix which is prepended to all php generated classes
+       * from this .proto. Default is empty.
+       * 
+ * + * optional string php_class_prefix = 40; + */ + public Builder setPhpClassPrefixBytes( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00008000; + phpClassPrefix_ = value; + onChanged(); + return this; + } + private java.util.List uninterpretedOption_ = java.util.Collections.emptyList(); private void ensureUninterpretedOptionIsMutable() { - if (!((bitField0_ & 0x00008000) == 0x00008000)) { + if (!((bitField0_ & 0x00010000) == 0x00010000)) { uninterpretedOption_ = new java.util.ArrayList(uninterpretedOption_); - bitField0_ |= 0x00008000; + bitField0_ |= 0x00010000; } } @@ -20900,7 +21257,7 @@ public final class DescriptorProtos { public Builder clearUninterpretedOption() { if (uninterpretedOptionBuilder_ == null) { uninterpretedOption_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00008000); + bitField0_ = (bitField0_ & ~0x00010000); onChanged(); } else { uninterpretedOptionBuilder_.clear(); @@ -21005,7 +21362,7 @@ public final class DescriptorProtos { uninterpretedOptionBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.Builder, org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOptionOrBuilder>( uninterpretedOption_, - ((bitField0_ & 0x00008000) == 0x00008000), + ((bitField0_ & 0x00010000) == 0x00010000), getParentForChildren(), isClean()); uninterpretedOption_ = null; @@ -21727,6 +22084,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MessageOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -23792,6 +24160,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.FieldOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -25216,6 +25595,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.OneofOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26267,6 +26657,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -27383,6 +27784,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.EnumValueOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28438,6 +28850,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.ServiceOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -29666,6 +30089,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.MethodOptions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30920,6 +31354,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption.NamePart parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -31683,6 +32128,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.UninterpretedOption parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -33898,6 +34354,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo.Location parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -35382,6 +35849,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.SourceCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -37333,6 +37811,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo.Annotation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -38069,6 +38558,17 @@ public final class DescriptorProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.GeneratedCodeInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -38898,7 +39398,7 @@ public final class DescriptorProtos { "\n\013output_type\030\003 \001(\t\022/\n\007options\030\004 \001(\0132\036.g" + "oogle.protobuf.MethodOptions\022\037\n\020client_s" + "treaming\030\005 \001(\010:\005false\022\037\n\020server_streamin" + - "g\030\006 \001(\010:\005false\"\232\005\n\013FileOptions\022\024\n\014java_p" + + "g\030\006 \001(\010:\005false\"\264\005\n\013FileOptions\022\024\n\014java_p" + "ackage\030\001 \001(\t\022\034\n\024java_outer_classname\030\010 \001" + "(\t\022\"\n\023java_multiple_files\030\n \001(\010:\005false\022)" + "\n\035java_generate_equals_and_hash\030\024 \001(\010B\002\030" + @@ -38911,67 +39411,68 @@ public final class DescriptorProtos { "se\022\031\n\ndeprecated\030\027 \001(\010:\005false\022\037\n\020cc_enab" + "le_arenas\030\037 \001(\010:\005false\022\031\n\021objc_class_pre" + "fix\030$ \001(\t\022\030\n\020csharp_namespace\030% \001(\t\022\024\n\014s" + - "wift_prefix\030\' \001(\t\022C\n\024uninterpreted_optio" + - "n\030\347\007 \003(\0132$.google.protobuf.Uninterpreted", - "Option\":\n\014OptimizeMode\022\t\n\005SPEED\020\001\022\r\n\tCOD" + - "E_SIZE\020\002\022\020\n\014LITE_RUNTIME\020\003*\t\010\350\007\020\200\200\200\200\002J\004\010" + - "&\020\'\"\354\001\n\016MessageOptions\022&\n\027message_set_wi" + - "re_format\030\001 \001(\010:\005false\022.\n\037no_standard_de" + - "scriptor_accessor\030\002 \001(\010:\005false\022\031\n\ndeprec" + - "ated\030\003 \001(\010:\005false\022\021\n\tmap_entry\030\007 \001(\010\022C\n\024" + - "uninterpreted_option\030\347\007 \003(\0132$.google.pro" + - "tobuf.UninterpretedOption*\t\010\350\007\020\200\200\200\200\002J\004\010\010" + - "\020\t\"\236\003\n\014FieldOptions\022:\n\005ctype\030\001 \001(\0162#.goo" + - "gle.protobuf.FieldOptions.CType:\006STRING\022", - "\016\n\006packed\030\002 \001(\010\022?\n\006jstype\030\006 \001(\0162$.google" + - ".protobuf.FieldOptions.JSType:\tJS_NORMAL" + - "\022\023\n\004lazy\030\005 \001(\010:\005false\022\031\n\ndeprecated\030\003 \001(" + - "\010:\005false\022\023\n\004weak\030\n \001(\010:\005false\022C\n\024uninter" + + "wift_prefix\030\' \001(\t\022\030\n\020php_class_prefix\030( " + + "\001(\t\022C\n\024uninterpreted_option\030\347\007 \003(\0132$.goo", + "gle.protobuf.UninterpretedOption\":\n\014Opti" + + "mizeMode\022\t\n\005SPEED\020\001\022\r\n\tCODE_SIZE\020\002\022\020\n\014LI" + + "TE_RUNTIME\020\003*\t\010\350\007\020\200\200\200\200\002J\004\010&\020\'\"\362\001\n\016Messag" + + "eOptions\022&\n\027message_set_wire_format\030\001 \001(" + + "\010:\005false\022.\n\037no_standard_descriptor_acces" + + "sor\030\002 \001(\010:\005false\022\031\n\ndeprecated\030\003 \001(\010:\005fa" + + "lse\022\021\n\tmap_entry\030\007 \001(\010\022C\n\024uninterpreted_" + + "option\030\347\007 \003(\0132$.google.protobuf.Uninterp" + + "retedOption*\t\010\350\007\020\200\200\200\200\002J\004\010\010\020\tJ\004\010\t\020\n\"\236\003\n\014F" + + "ieldOptions\022:\n\005ctype\030\001 \001(\0162#.google.prot", + "obuf.FieldOptions.CType:\006STRING\022\016\n\006packe" + + "d\030\002 \001(\010\022?\n\006jstype\030\006 \001(\0162$.google.protobu" + + "f.FieldOptions.JSType:\tJS_NORMAL\022\023\n\004lazy" + + "\030\005 \001(\010:\005false\022\031\n\ndeprecated\030\003 \001(\010:\005false" + + "\022\023\n\004weak\030\n \001(\010:\005false\022C\n\024uninterpreted_o" + + "ption\030\347\007 \003(\0132$.google.protobuf.Uninterpr" + + "etedOption\"/\n\005CType\022\n\n\006STRING\020\000\022\010\n\004CORD\020" + + "\001\022\020\n\014STRING_PIECE\020\002\"5\n\006JSType\022\r\n\tJS_NORM" + + "AL\020\000\022\r\n\tJS_STRING\020\001\022\r\n\tJS_NUMBER\020\002*\t\010\350\007\020" + + "\200\200\200\200\002J\004\010\004\020\005\"^\n\014OneofOptions\022C\n\024uninterpr", + "eted_option\030\347\007 \003(\0132$.google.protobuf.Uni" + + "nterpretedOption*\t\010\350\007\020\200\200\200\200\002\"\223\001\n\013EnumOpti" + + "ons\022\023\n\013allow_alias\030\002 \001(\010\022\031\n\ndeprecated\030\003" + + " \001(\010:\005false\022C\n\024uninterpreted_option\030\347\007 \003" + + "(\0132$.google.protobuf.UninterpretedOption" + + "*\t\010\350\007\020\200\200\200\200\002J\004\010\005\020\006\"}\n\020EnumValueOptions\022\031\n" + + "\ndeprecated\030\001 \001(\010:\005false\022C\n\024uninterprete" + + "d_option\030\347\007 \003(\0132$.google.protobuf.Uninte" + + "rpretedOption*\t\010\350\007\020\200\200\200\200\002\"{\n\016ServiceOptio" + + "ns\022\031\n\ndeprecated\030! \001(\010:\005false\022C\n\024uninter", "preted_option\030\347\007 \003(\0132$.google.protobuf.U" + - "ninterpretedOption\"/\n\005CType\022\n\n\006STRING\020\000\022" + - "\010\n\004CORD\020\001\022\020\n\014STRING_PIECE\020\002\"5\n\006JSType\022\r\n" + - "\tJS_NORMAL\020\000\022\r\n\tJS_STRING\020\001\022\r\n\tJS_NUMBER" + - "\020\002*\t\010\350\007\020\200\200\200\200\002J\004\010\004\020\005\"^\n\014OneofOptions\022C\n\024u" + - "ninterpreted_option\030\347\007 \003(\0132$.google.prot", - "obuf.UninterpretedOption*\t\010\350\007\020\200\200\200\200\002\"\215\001\n\013" + - "EnumOptions\022\023\n\013allow_alias\030\002 \001(\010\022\031\n\ndepr" + - "ecated\030\003 \001(\010:\005false\022C\n\024uninterpreted_opt" + - "ion\030\347\007 \003(\0132$.google.protobuf.Uninterpret" + - "edOption*\t\010\350\007\020\200\200\200\200\002\"}\n\020EnumValueOptions\022" + - "\031\n\ndeprecated\030\001 \001(\010:\005false\022C\n\024uninterpre" + - "ted_option\030\347\007 \003(\0132$.google.protobuf.Unin" + - "terpretedOption*\t\010\350\007\020\200\200\200\200\002\"{\n\016ServiceOpt" + - "ions\022\031\n\ndeprecated\030! \001(\010:\005false\022C\n\024unint" + - "erpreted_option\030\347\007 \003(\0132$.google.protobuf", - ".UninterpretedOption*\t\010\350\007\020\200\200\200\200\002\"\255\002\n\rMeth" + - "odOptions\022\031\n\ndeprecated\030! \001(\010:\005false\022_\n\021" + - "idempotency_level\030\" \001(\0162/.google.protobu" + - "f.MethodOptions.IdempotencyLevel:\023IDEMPO" + - "TENCY_UNKNOWN\022C\n\024uninterpreted_option\030\347\007" + - " \003(\0132$.google.protobuf.UninterpretedOpti" + - "on\"P\n\020IdempotencyLevel\022\027\n\023IDEMPOTENCY_UN" + - "KNOWN\020\000\022\023\n\017NO_SIDE_EFFECTS\020\001\022\016\n\nIDEMPOTE" + - "NT\020\002*\t\010\350\007\020\200\200\200\200\002\"\236\002\n\023UninterpretedOption\022" + - ";\n\004name\030\002 \003(\0132-.google.protobuf.Uninterp", - "retedOption.NamePart\022\030\n\020identifier_value" + - "\030\003 \001(\t\022\032\n\022positive_int_value\030\004 \001(\004\022\032\n\022ne" + - "gative_int_value\030\005 \001(\003\022\024\n\014double_value\030\006" + - " \001(\001\022\024\n\014string_value\030\007 \001(\014\022\027\n\017aggregate_" + - "value\030\010 \001(\t\0323\n\010NamePart\022\021\n\tname_part\030\001 \002" + - "(\t\022\024\n\014is_extension\030\002 \002(\010\"\325\001\n\016SourceCodeI" + - "nfo\022:\n\010location\030\001 \003(\0132(.google.protobuf." + - "SourceCodeInfo.Location\032\206\001\n\010Location\022\020\n\004" + - "path\030\001 \003(\005B\002\020\001\022\020\n\004span\030\002 \003(\005B\002\020\001\022\030\n\020lead" + - "ing_comments\030\003 \001(\t\022\031\n\021trailing_comments\030", - "\004 \001(\t\022!\n\031leading_detached_comments\030\006 \003(\t" + - "\"\247\001\n\021GeneratedCodeInfo\022A\n\nannotation\030\001 \003" + - "(\0132-.google.protobuf.GeneratedCodeInfo.A" + - "nnotation\032O\n\nAnnotation\022\020\n\004path\030\001 \003(\005B\002\020" + - "\001\022\023\n\013source_file\030\002 \001(\t\022\r\n\005begin\030\003 \001(\005\022\013\n" + - "\003end\030\004 \001(\005B\214\001\n\023com.google.protobufB\020Desc" + - "riptorProtosH\001Z>github.com/golang/protob" + - "uf/protoc-gen-go/descriptor;descriptor\242\002" + - "\003GPB\252\002\032Google.Protobuf.Reflection" + "ninterpretedOption*\t\010\350\007\020\200\200\200\200\002\"\255\002\n\rMethod" + + "Options\022\031\n\ndeprecated\030! \001(\010:\005false\022_\n\021id" + + "empotency_level\030\" \001(\0162/.google.protobuf." + + "MethodOptions.IdempotencyLevel:\023IDEMPOTE" + + "NCY_UNKNOWN\022C\n\024uninterpreted_option\030\347\007 \003" + + "(\0132$.google.protobuf.UninterpretedOption" + + "\"P\n\020IdempotencyLevel\022\027\n\023IDEMPOTENCY_UNKN" + + "OWN\020\000\022\023\n\017NO_SIDE_EFFECTS\020\001\022\016\n\nIDEMPOTENT" + + "\020\002*\t\010\350\007\020\200\200\200\200\002\"\236\002\n\023UninterpretedOption\022;\n", + "\004name\030\002 \003(\0132-.google.protobuf.Uninterpre" + + "tedOption.NamePart\022\030\n\020identifier_value\030\003" + + " \001(\t\022\032\n\022positive_int_value\030\004 \001(\004\022\032\n\022nega" + + "tive_int_value\030\005 \001(\003\022\024\n\014double_value\030\006 \001" + + "(\001\022\024\n\014string_value\030\007 \001(\014\022\027\n\017aggregate_va" + + "lue\030\010 \001(\t\0323\n\010NamePart\022\021\n\tname_part\030\001 \002(\t" + + "\022\024\n\014is_extension\030\002 \002(\010\"\325\001\n\016SourceCodeInf" + + "o\022:\n\010location\030\001 \003(\0132(.google.protobuf.So" + + "urceCodeInfo.Location\032\206\001\n\010Location\022\020\n\004pa" + + "th\030\001 \003(\005B\002\020\001\022\020\n\004span\030\002 \003(\005B\002\020\001\022\030\n\020leadin", + "g_comments\030\003 \001(\t\022\031\n\021trailing_comments\030\004 " + + "\001(\t\022!\n\031leading_detached_comments\030\006 \003(\t\"\247" + + "\001\n\021GeneratedCodeInfo\022A\n\nannotation\030\001 \003(\013" + + "2-.google.protobuf.GeneratedCodeInfo.Ann" + + "otation\032O\n\nAnnotation\022\020\n\004path\030\001 \003(\005B\002\020\001\022" + + "\023\n\013source_file\030\002 \001(\t\022\r\n\005begin\030\003 \001(\005\022\013\n\003e" + + "nd\030\004 \001(\005B\214\001\n\023com.google.protobufB\020Descri" + + "ptorProtosH\001Z>github.com/golang/protobuf" + + "/protoc-gen-go/descriptor;descriptor\242\002\003G" + + "PB\252\002\032Google.Protobuf.Reflection" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -39056,7 +39557,7 @@ public final class DescriptorProtos { internal_static_google_protobuf_FileOptions_fieldAccessorTable = new org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_protobuf_FileOptions_descriptor, - new java.lang.String[] { "JavaPackage", "JavaOuterClassname", "JavaMultipleFiles", "JavaGenerateEqualsAndHash", "JavaStringCheckUtf8", "OptimizeFor", "GoPackage", "CcGenericServices", "JavaGenericServices", "PyGenericServices", "Deprecated", "CcEnableArenas", "ObjcClassPrefix", "CsharpNamespace", "SwiftPrefix", "UninterpretedOption", }); + new java.lang.String[] { "JavaPackage", "JavaOuterClassname", "JavaMultipleFiles", "JavaGenerateEqualsAndHash", "JavaStringCheckUtf8", "OptimizeFor", "GoPackage", "CcGenericServices", "JavaGenericServices", "PyGenericServices", "Deprecated", "CcEnableArenas", "ObjcClassPrefix", "CsharpNamespace", "SwiftPrefix", "PhpClassPrefix", "UninterpretedOption", }); internal_static_google_protobuf_MessageOptions_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_protobuf_MessageOptions_fieldAccessorTable = new diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Descriptors.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Descriptors.java index 62ccd197d1..efdee3baa0 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Descriptors.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Descriptors.java @@ -30,9 +30,10 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; + import org.apache.hadoop.hbase.shaded.com.google.protobuf.DescriptorProtos.*; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.Syntax; - import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Arrays; @@ -682,9 +683,7 @@ public final class Descriptors { /** Determines if the given field name is reserved. */ public boolean isReservedName(final String name) { - if (name == null) { - throw new NullPointerException(); - } + checkNotNull(name); for (final String reservedName : proto.getReservedNameList()) { if (reservedName.equals(name)) { return true; diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java index 88effb2869..2f30fe41ed 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleArrayList.java @@ -30,8 +30,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.DoubleList; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.DoubleList; import java.util.Arrays; import java.util.Collection; import java.util.RandomAccess; @@ -41,9 +42,8 @@ import java.util.RandomAccess; * * @author dweis@google.com (Daniel Weis) */ -final class DoubleArrayList - extends AbstractProtobufList - implements DoubleList, RandomAccess { +final class DoubleArrayList extends AbstractProtobufList + implements DoubleList, RandomAccess, PrimitiveNonBoxingCollection { private static final DoubleArrayList EMPTY_LIST = new DoubleArrayList(); static { @@ -199,9 +199,7 @@ final class DoubleArrayList public boolean addAll(Collection collection) { ensureIsMutable(); - if (collection == null) { - throw new NullPointerException(); - } + checkNotNull(collection); // We specialize when adding another DoubleArrayList to avoid boxing elements. if (!(collection instanceof DoubleArrayList)) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java index 98a0ecce1c..be9fc4cd8e 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DoubleValue.java @@ -154,6 +154,17 @@ public final class DoubleValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.DoubleValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java index c6736efce3..b644edbc23 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Duration.java @@ -11,6 +11,7 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; * or "month". It is related to Timestamp in that the difference between * two Timestamp values is a Duration and it can be added or subtracted * from a Timestamp. Range is approximately +-10,000 years. + * # Examples * Example 1: Compute Duration from two Timestamps in pseudo code. * Timestamp start = ...; * Timestamp end = ...; @@ -41,6 +42,14 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; * td = datetime.timedelta(days=3, minutes=10) * duration = Duration() * duration.FromTimedelta(td) + * # JSON Mapping + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". * * * Protobuf type {@code google.protobuf.Duration} @@ -121,7 +130,8 @@ public final class Duration extends /** *
    * Signed seconds of the span of time. Must be from -315,576,000,000
-   * to +315,576,000,000 inclusive.
+   * to +315,576,000,000 inclusive. Note: these bounds are computed from:
+   * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
    * 
* * int64 seconds = 1; @@ -222,6 +232,17 @@ public final class Duration extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -306,6 +327,7 @@ public final class Duration extends * or "month". It is related to Timestamp in that the difference between * two Timestamp values is a Duration and it can be added or subtracted * from a Timestamp. Range is approximately +-10,000 years. + * # Examples * Example 1: Compute Duration from two Timestamps in pseudo code. * Timestamp start = ...; * Timestamp end = ...; @@ -336,6 +358,14 @@ public final class Duration extends * td = datetime.timedelta(days=3, minutes=10) * duration = Duration() * duration.FromTimedelta(td) + * # JSON Mapping + * In JSON format, the Duration type is encoded as a string rather than an + * object, where the string ends in the suffix "s" (indicating seconds) and + * is preceded by the number of seconds, with nanoseconds expressed as + * fractional seconds. For example, 3 seconds with 0 nanoseconds should be + * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + * be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + * microsecond should be expressed in JSON format as "3.000001s". * * * Protobuf type {@code google.protobuf.Duration} @@ -478,7 +508,8 @@ public final class Duration extends /** *
      * Signed seconds of the span of time. Must be from -315,576,000,000
-     * to +315,576,000,000 inclusive.
+     * to +315,576,000,000 inclusive. Note: these bounds are computed from:
+     * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
      * 
* * int64 seconds = 1; @@ -489,7 +520,8 @@ public final class Duration extends /** *
      * Signed seconds of the span of time. Must be from -315,576,000,000
-     * to +315,576,000,000 inclusive.
+     * to +315,576,000,000 inclusive. Note: these bounds are computed from:
+     * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
      * 
* * int64 seconds = 1; @@ -503,7 +535,8 @@ public final class Duration extends /** *
      * Signed seconds of the span of time. Must be from -315,576,000,000
-     * to +315,576,000,000 inclusive.
+     * to +315,576,000,000 inclusive. Note: these bounds are computed from:
+     * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
      * 
* * int64 seconds = 1; diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java index 9c19aaef25..a0f3355cee 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DurationOrBuilder.java @@ -10,7 +10,8 @@ public interface DurationOrBuilder extends /** *
    * Signed seconds of the span of time. Must be from -315,576,000,000
-   * to +315,576,000,000 inclusive.
+   * to +315,576,000,000 inclusive. Note: these bounds are computed from:
+   * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
    * 
* * int64 seconds = 1; diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java index 13bccd6d79..a8e06c55cd 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/DynamicMessage.java @@ -30,11 +30,12 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; + import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor; - import java.io.IOException; import java.io.InputStream; import java.util.Collections; @@ -631,9 +632,7 @@ public final class DynamicMessage extends AbstractMessage { /** Verifies that the value is EnumValueDescriptor and matches Enum Type. */ private void ensureSingularEnumValueDescriptor( FieldDescriptor field, Object value) { - if (value == null) { - throw new NullPointerException(); - } + checkNotNull(value); if (!(value instanceof EnumValueDescriptor)) { throw new IllegalArgumentException( "DynamicMessage should use EnumValueDescriptor to set Enum Value."); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java index 61f1574171..5af955b641 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Empty.java @@ -125,6 +125,17 @@ public final class Empty extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Empty parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java index 3d2a20642c..5c11958633 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Enum.java @@ -445,6 +445,17 @@ public final class Enum extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Enum parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumValue.java index a0a9d4911b..b759a9dcd7 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/EnumValue.java @@ -291,6 +291,17 @@ public final class EnumValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.EnumValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Field.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Field.java index d33fd75c1e..1fc925530f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Field.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Field.java @@ -1173,6 +1173,17 @@ public final class Field extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Field parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Field parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Field parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMask.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMask.java index df42bcabdb..1d8dab7730 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMask.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldMask.java @@ -347,6 +347,17 @@ public final class FieldMask extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FieldMask parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldSet.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldSet.java index a1bf5537da..9deb729d66 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldSet.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FieldSet.java @@ -30,8 +30,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyField.LazyIterator; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.LazyField.LazyIterator; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -385,9 +386,7 @@ final class FieldSet - implements FloatList, RandomAccess { +final class FloatArrayList extends AbstractProtobufList + implements FloatList, RandomAccess, PrimitiveNonBoxingCollection { private static final FloatArrayList EMPTY_LIST = new FloatArrayList(); static { @@ -198,9 +198,7 @@ final class FloatArrayList public boolean addAll(Collection collection) { ensureIsMutable(); - if (collection == null) { - throw new NullPointerException(); - } + checkNotNull(collection); // We specialize when adding another FloatArrayList to avoid boxing elements. if (!(collection instanceof FloatArrayList)) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatValue.java index f3315c8579..c99725217b 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/FloatValue.java @@ -154,6 +154,17 @@ public final class FloatValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.FloatValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessage.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessage.java index 4a0f93a089..06a90dd5e7 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessage.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessage.java @@ -854,6 +854,7 @@ public abstract class GeneratedMessage extends AbstractMessage /** Check if a singular extension is present. */ @Override + @SuppressWarnings("unchecked") public final boolean hasExtension(final ExtensionLite extensionLite) { Extension extension = checkNotLite(extensionLite); @@ -863,6 +864,7 @@ public abstract class GeneratedMessage extends AbstractMessage /** Get the number of elements in a repeated extension. */ @Override + @SuppressWarnings("unchecked") public final int getExtensionCount( final ExtensionLite> extensionLite) { Extension> extension = checkNotLite(extensionLite); @@ -2555,6 +2557,7 @@ public abstract class GeneratedMessage extends AbstractMessage } @Override + @SuppressWarnings("unchecked") public Object get(GeneratedMessage message) { List result = new ArrayList(); for (int i = 0; i < getRepeatedCount(message); i++) { @@ -2564,6 +2567,7 @@ public abstract class GeneratedMessage extends AbstractMessage } @Override + @SuppressWarnings("unchecked") public Object get(Builder builder) { List result = new ArrayList(); for (int i = 0; i < getRepeatedCount(builder); i++) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageLite.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageLite.java index 584eba1a2f..a8f6ef921d 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageLite.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageLite.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Bu import org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageLite.EqualsVisitor.NotEqualsException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.BooleanList; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.DoubleList; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.FloatList; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.IntList; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.LongList; @@ -45,6 +46,7 @@ import java.io.ObjectStreamException; import java.io.Serializable; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -136,6 +138,7 @@ public abstract class GeneratedMessageLite< return false; } + try { visit(EqualsVisitor.INSTANCE, (MessageType) other); } catch (NotEqualsException e) { @@ -220,6 +223,7 @@ public abstract class GeneratedMessageLite< } @Override + @SuppressWarnings("unchecked") public final BuilderType toBuilder() { BuilderType builder = (BuilderType) dynamicMethod(MethodToInvoke.NEW_BUILDER); builder.mergeFrom((MessageType) this); @@ -453,6 +457,7 @@ public abstract class GeneratedMessageLite< */ protected FieldSet extensions = FieldSet.newFieldSet(); + @SuppressWarnings("unchecked") protected final void mergeExtensionFields(final MessageType other) { if (extensions.isImmutable()) { extensions = extensions.clone(); @@ -1152,6 +1157,7 @@ public abstract class GeneratedMessageLite< } } + /** * Lite equivalent to {@link GeneratedMessage.GeneratedExtension}. * @@ -1527,6 +1533,20 @@ public abstract class GeneratedMessageLite< // Validates last tag. protected static > T parseFrom( + T defaultInstance, ByteBuffer data, ExtensionRegistryLite extensionRegistry) + throws InvalidProtocolBufferException { + return checkMessageInitialized( + parseFrom(defaultInstance, CodedInputStream.newInstance(data), extensionRegistry)); + } + + // Validates last tag. + protected static > T parseFrom( + T defaultInstance, ByteBuffer data) throws InvalidProtocolBufferException { + return parseFrom(defaultInstance, data, ExtensionRegistryLite.getEmptyRegistry()); + } + + // Validates last tag. + protected static > T parseFrom( T defaultInstance, ByteString data) throws InvalidProtocolBufferException { return checkMessageInitialized( @@ -1977,13 +1997,13 @@ public abstract class GeneratedMessageLite< /** * Implements hashCode by accumulating state. */ - private static class HashCodeVisitor implements Visitor { + static class HashCodeVisitor implements Visitor { // The caller must ensure that the visitor is invoked parameterized with this and this such that // other is this. This is required due to how oneof cases are handled. See the class comment // on Visitor for more information. - private int hashCode = 0; + int hashCode = 0; @Override public boolean visitBoolean( diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageV3.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageV3.java index cc815fc762..35e0d945d7 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageV3.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/GeneratedMessageV3.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescr // class without breaking binary compatibility with old generated code that still subclasses // the old GeneratedMessageV3 class. To allow these different GeneratedMessageV3V? classes to // interoperate (e.g., a GeneratedMessageV3V3 object has a message extension field whose class -// type is GeneratedMessageV3V4), these classes still share a common parent class AbstarctMessage +// type is GeneratedMessageV3V4), these classes still share a common parent class AbstractMessage // and are using the same GeneratedMessage.GeneratedExtension class for extension definitions. // Since this class becomes GeneratedMessageV3V? in opensource, we have to add an import here // to be able to use GeneratedMessage.GeneratedExtension. The GeneratedExtension definition in @@ -866,6 +866,7 @@ public abstract class GeneratedMessageV3 extends AbstractMessage /** Check if a singular extension is present. */ @Override + @SuppressWarnings("unchecked") public final boolean hasExtension(final ExtensionLite extensionLite) { Extension extension = checkNotLite(extensionLite); @@ -875,6 +876,7 @@ public abstract class GeneratedMessageV3 extends AbstractMessage /** Get the number of elements in a repeated extension. */ @Override + @SuppressWarnings("unchecked") public final int getExtensionCount( final ExtensionLite> extensionLite) { Extension> extension = checkNotLite(extensionLite); @@ -2219,6 +2221,7 @@ public abstract class GeneratedMessageV3 extends AbstractMessage } @Override + @SuppressWarnings("unchecked") public Object get(GeneratedMessageV3 message) { List result = new ArrayList(); for (int i = 0; i < getRepeatedCount(message); i++) { @@ -2228,6 +2231,7 @@ public abstract class GeneratedMessageV3 extends AbstractMessage } @Override + @SuppressWarnings("unchecked") public Object get(Builder builder) { List result = new ArrayList(); for (int i = 0; i < getRepeatedCount(builder); i++) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java index 476b0862cb..e535734c50 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int32Value.java @@ -151,6 +151,17 @@ public final class Int32Value extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int32Value parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java index fce2b73702..a0af7c74ca 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Int64Value.java @@ -152,6 +152,17 @@ public final class Int64Value extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Int64Value parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java index ba8b7ec2b3..20807bfa1e 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/IntArrayList.java @@ -30,8 +30,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.IntList; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.IntList; import java.util.Arrays; import java.util.Collection; import java.util.RandomAccess; @@ -41,9 +42,8 @@ import java.util.RandomAccess; * * @author dweis@google.com (Daniel Weis) */ -final class IntArrayList - extends AbstractProtobufList - implements IntList, RandomAccess { +final class IntArrayList extends AbstractProtobufList + implements IntList, RandomAccess, PrimitiveNonBoxingCollection { private static final IntArrayList EMPTY_LIST = new IntArrayList(); static { @@ -198,9 +198,7 @@ final class IntArrayList public boolean addAll(Collection collection) { ensureIsMutable(); - if (collection == null) { - throw new NullPointerException(); - } + checkNotNull(collection); // We specialize when adding another IntArrayList to avoid boxing elements. if (!(collection instanceof IntArrayList)) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java index 8f5c2297c6..d722f87692 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Internal.java @@ -62,6 +62,16 @@ public final class Internal { /** * Throws an appropriate {@link NullPointerException} if the given objects is {@code null}. */ + static T checkNotNull(T obj) { + if (obj == null) { + throw new NullPointerException(); + } + return obj; + } + + /** + * Throws an appropriate {@link NullPointerException} if the given objects is {@code null}. + */ static T checkNotNull(T obj, String message) { if (obj == null) { throw new NullPointerException(message); @@ -420,6 +430,11 @@ public final class Internal { CodedInputStream.newInstance(EMPTY_BYTE_ARRAY); + /** Helper method to merge two MessageLite instances. */ + static Object mergeMessage(Object destination, Object source) { + return ((MessageLite) destination).toBuilder().mergeFrom((MessageLite) source).buildPartial(); + } + /** * Provides an immutable view of {@code List} around a {@code List}. * diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyFieldLite.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyFieldLite.java index 91b70e0cab..58491af329 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyFieldLite.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LazyFieldLite.java @@ -394,6 +394,7 @@ public class LazyFieldLite { } } + /** * Might lazily parse the bytes that were previously passed in. Is thread-safe. */ diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ListValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ListValue.java index 4ddc71b443..46cb286e8c 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ListValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/ListValue.java @@ -202,6 +202,17 @@ public final class ListValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.ListValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LongArrayList.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LongArrayList.java index 39d14b2f49..b30dad6cf8 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LongArrayList.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/LongArrayList.java @@ -30,8 +30,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.LongList; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.LongList; import java.util.Arrays; import java.util.Collection; import java.util.RandomAccess; @@ -41,9 +42,8 @@ import java.util.RandomAccess; * * @author dweis@google.com (Daniel Weis) */ -final class LongArrayList - extends AbstractProtobufList - implements LongList, RandomAccess { +final class LongArrayList extends AbstractProtobufList + implements LongList, RandomAccess, PrimitiveNonBoxingCollection { private static final LongArrayList EMPTY_LIST = new LongArrayList(); static { @@ -198,9 +198,7 @@ final class LongArrayList public boolean addAll(Collection collection) { ensureIsMutable(); - if (collection == null) { - throw new NullPointerException(); - } + checkNotNull(collection); // We specialize when adding another LongArrayList to avoid boxing elements. if (!(collection instanceof LongArrayList)) { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntry.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntry.java index d586cc79b7..8f55cc3289 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntry.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntry.java @@ -33,7 +33,6 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor; - import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -89,6 +88,7 @@ public final class MapEntry extends AbstractMessage { } /** Create a MapEntry with the provided key and value. */ + @SuppressWarnings("unchecked") private MapEntry(Metadata metadata, K key, V value) { this.key = key; this.value = value; @@ -170,7 +170,7 @@ public final class MapEntry extends AbstractMessage { @Override public Builder toBuilder() { - return new Builder(metadata, key, value); + return new Builder(metadata, key, value, true, true); } @Override @@ -246,15 +246,19 @@ public final class MapEntry extends AbstractMessage { private final Metadata metadata; private K key; private V value; + private boolean hasKey; + private boolean hasValue; private Builder(Metadata metadata) { - this(metadata, metadata.defaultKey, metadata.defaultValue); + this(metadata, metadata.defaultKey, metadata.defaultValue, false, false); } - private Builder(Metadata metadata, K key, V value) { + private Builder(Metadata metadata, K key, V value, boolean hasKey, boolean hasValue) { this.metadata = metadata; this.key = key; this.value = value; + this.hasKey = hasKey; + this.hasValue = hasValue; } public K getKey() { @@ -267,21 +271,25 @@ public final class MapEntry extends AbstractMessage { public Builder setKey(K key) { this.key = key; + this.hasKey = true; return this; } public Builder clearKey() { this.key = metadata.defaultKey; + this.hasKey = false; return this; } public Builder setValue(V value) { this.value = value; + this.hasValue = true; return this; } public Builder clearValue() { this.value = metadata.defaultValue; + this.hasValue = false; return this; } @@ -403,7 +411,7 @@ public final class MapEntry extends AbstractMessage { @Override public boolean hasField(FieldDescriptor field) { checkFieldDescriptor(field); - return true; + return field.getNumber() == 1 ? hasKey : hasValue; } @Override @@ -435,8 +443,9 @@ public final class MapEntry extends AbstractMessage { } @Override + @SuppressWarnings("unchecked") public Builder clone() { - return new Builder(metadata, key, value); + return new Builder(metadata, key, value, hasKey, hasValue); } } @@ -446,4 +455,9 @@ public final class MapEntry extends AbstractMessage { } return true; } + + /** Returns the metadata only for experimental runtime. */ + final Metadata getMetadata() { + return metadata; + } } diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntryLite.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntryLite.java index d8a19bbb0b..9ded61dd6d 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntryLite.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapEntryLite.java @@ -223,4 +223,9 @@ public class MapEntryLite { input.popLimit(oldLimit); map.put(key, value); } + + /** For experimental runtime internal use only. */ + Metadata getMetadata() { + return metadata; + } } diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapField.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapField.java index 42905d266f..47b3d33bab 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapField.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapField.java @@ -30,6 +30,8 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; + import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -100,6 +102,7 @@ public class MapField implements MutabilityOracle { } @Override + @SuppressWarnings("unchecked") public void convertMessageToKeyAndValue(Message message, Map map) { MapEntry entry = (MapEntry) message; map.put(entry.getKey(), entry.getValue()); @@ -328,6 +331,8 @@ public class MapField implements MutabilityOracle { @Override public V put(K key, V value) { mutabilityOracle.ensureMutable(); + checkNotNull(key); + checkNotNull(value); return delegate.put(key, value); } @@ -340,6 +345,10 @@ public class MapField implements MutabilityOracle { @Override public void putAll(Map m) { mutabilityOracle.ensureMutable(); + for (K key : m.keySet()) { + checkNotNull(key); + checkNotNull(m.get(key)); + } delegate.putAll(m); } diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapFieldLite.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapFieldLite.java index 761c4cec4a..b5401a91dc 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapFieldLite.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/MapFieldLite.java @@ -30,8 +30,9 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; -import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLite; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; +import org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLite; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; @@ -88,6 +89,9 @@ public final class MapFieldLite extends LinkedHashMap { @Override public V put(K key, V value) { ensureMutable(); + checkNotNull(key); + + checkNotNull(value); return super.put(key, value); } @@ -97,6 +101,7 @@ public final class MapFieldLite extends LinkedHashMap { @Override public void putAll(Map m) { ensureMutable(); + checkForNullKeysAndValues(m); super.putAll(m); } @@ -105,6 +110,13 @@ public final class MapFieldLite extends LinkedHashMap { return super.remove(key); } + private static void checkForNullKeysAndValues(Map m) { + for (Object key : m.keySet()) { + checkNotNull(key); + checkNotNull(m.get(key)); + } + } + private static boolean equals(Object a, Object b) { if (a instanceof byte[] && b instanceof byte[]) { return Arrays.equals((byte[]) a, (byte[]) b); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Method.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Method.java index 32e8b35d81..cd700f5a0c 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Method.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Method.java @@ -482,6 +482,17 @@ public final class Method extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Method parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Method parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Method parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Mixin.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Mixin.java index 5dd6be6774..b81de0e770 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Mixin.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Mixin.java @@ -300,6 +300,17 @@ public final class Mixin extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Mixin parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Option.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Option.java index cfd28c9ef2..f40186466d 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Option.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Option.java @@ -257,6 +257,17 @@ public final class Option extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Option parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Option parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Option parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Parser.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Parser.java index f29b60b3fc..5871d72b0d 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Parser.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Parser.java @@ -31,6 +31,7 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; import java.io.InputStream; +import java.nio.ByteBuffer; /** * Abstract interface for parsing Protocol Messages. @@ -93,6 +94,18 @@ public interface Parser { // Convenience methods. /** + * Parses {@code data} as a message of {@code MessageType}. This is just a small wrapper around + * {@link #parseFrom(CodedInputStream)}. + */ + public MessageType parseFrom(ByteBuffer data) throws InvalidProtocolBufferException; + + /** + * Parses {@code data} as a message of {@code MessageType}. This is just a small wrapper around + * {@link #parseFrom(CodedInputStream, ExtensionRegistryLite)}. + */ + public MessageType parseFrom(ByteBuffer data, ExtensionRegistryLite extensionRegistry) + throws InvalidProtocolBufferException; + /** * Parses {@code data} as a message of {@code MessageType}. * This is just a small wrapper around {@link #parseFrom(CodedInputStream)}. */ diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/PrimitiveNonBoxingCollection.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/PrimitiveNonBoxingCollection.java new file mode 100644 index 0000000000..30a075fb0a --- /dev/null +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/PrimitiveNonBoxingCollection.java @@ -0,0 +1,34 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package org.apache.hadoop.hbase.shaded.com.google.protobuf; + +/** A marker interface indicating that the collection supports primitives and is non-boxing. */ +interface PrimitiveNonBoxingCollection {} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RepeatedFieldBuilderV3.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RepeatedFieldBuilderV3.java index 9963e05f77..92c63fc508 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RepeatedFieldBuilderV3.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/RepeatedFieldBuilderV3.java @@ -30,6 +30,8 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; + import java.util.AbstractList; import java.util.ArrayList; import java.util.Collection; @@ -290,9 +292,7 @@ public class RepeatedFieldBuilderV3 */ public RepeatedFieldBuilderV3 setMessage( int index, MType message) { - if (message == null) { - throw new NullPointerException(); - } + checkNotNull(message); ensureMutableMessageList(); messages.set(index, message); if (builders != null) { @@ -315,9 +315,7 @@ public class RepeatedFieldBuilderV3 */ public RepeatedFieldBuilderV3 addMessage( MType message) { - if (message == null) { - throw new NullPointerException(); - } + checkNotNull(message); ensureMutableMessageList(); messages.add(message); if (builders != null) { @@ -339,9 +337,7 @@ public class RepeatedFieldBuilderV3 */ public RepeatedFieldBuilderV3 addMessage( int index, MType message) { - if (message == null) { - throw new NullPointerException(); - } + checkNotNull(message); ensureMutableMessageList(); messages.add(index, message); if (builders != null) { @@ -363,9 +359,7 @@ public class RepeatedFieldBuilderV3 public RepeatedFieldBuilderV3 addAllMessages( Iterable values) { for (final MType value : values) { - if (value == null) { - throw new NullPointerException(); - } + checkNotNull(value); } // If we can inspect the size, we can more efficiently add messages. diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java index 0a294dcb6e..b26eccf5d2 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SingleFieldBuilderV3.java @@ -30,6 +30,8 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; +import static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.checkNotNull; + /** * {@code SingleFieldBuilderV3} implements a structure that a protocol * message uses to hold a single field of another protocol message. It supports @@ -84,10 +86,7 @@ public class SingleFieldBuilderV3 MType message, AbstractMessage.BuilderParent parent, boolean isClean) { - if (message == null) { - throw new NullPointerException(); - } - this.message = message; + this.message = checkNotNull(message); this.parent = parent; this.isClean = isClean; } @@ -169,10 +168,7 @@ public class SingleFieldBuilderV3 */ public SingleFieldBuilderV3 setMessage( MType message) { - if (message == null) { - throw new NullPointerException(); - } - this.message = message; + this.message = checkNotNull(message); if (builder != null) { builder.dispose(); builder = null; diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java index ab4938cefb..5c5d005e60 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/SourceContext.java @@ -182,6 +182,17 @@ public final class SourceContext extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.SourceContext parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java index f3e2ef6a0c..99a3d5ad91 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/StringValue.java @@ -180,6 +180,17 @@ public final class StringValue extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.StringValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java index 2607900a66..6876791535 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Struct.java @@ -264,6 +264,17 @@ public final class Struct extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Struct parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormat.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormat.java index 491089b406..4043b732b5 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormat.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/TextFormat.java @@ -1442,7 +1442,7 @@ public final class TextFormat { /** * Parse a single field from {@code tokenizer} and merge it into - * {@code builder}. + * {@code target}. */ private void mergeField(final Tokenizer tokenizer, final ExtensionRegistry extensionRegistry, @@ -1712,6 +1712,8 @@ public final class TextFormat { } if (field.isRepeated()) { + // TODO(b/29122459): If field.isMapField() and FORBID_SINGULAR_OVERWRITES mode, + // check for duplicate map keys here. target.addRepeatedField(field, value); } else if ((singularOverwritePolicy == SingularOverwritePolicy.FORBID_SINGULAR_OVERWRITES) diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Timestamp.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Timestamp.java index 0023d505f6..bcaa88520f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Timestamp.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Timestamp.java @@ -16,6 +16,7 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; * By restricting to that range, we ensure that we can convert to * and from RFC 3339 date strings. * See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). + * # Examples * Example 1: Compute Timestamp from POSIX `time()`. * Timestamp timestamp; * timestamp.set_seconds(time(NULL)); @@ -42,6 +43,25 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; * Example 5: Compute Timestamp from current time in Python. * timestamp = Timestamp() * timestamp.GetCurrentTime() + * # JSON Mapping + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required, though only UTC (as indicated by "Z") is presently supported. + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * In JavaScript, one can convert a Date object to this format using the + * standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) + * with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one + * can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) + * to obtain a formatter capable of generating timestamps in this format. * * * Protobuf type {@code google.protobuf.Timestamp} @@ -222,6 +242,17 @@ public final class Timestamp extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Timestamp parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Timestamp parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Timestamp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -311,6 +342,7 @@ public final class Timestamp extends * By restricting to that range, we ensure that we can convert to * and from RFC 3339 date strings. * See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). + * # Examples * Example 1: Compute Timestamp from POSIX `time()`. * Timestamp timestamp; * timestamp.set_seconds(time(NULL)); @@ -337,6 +369,25 @@ public final class Timestamp extends * Example 5: Compute Timestamp from current time in Python. * timestamp = Timestamp() * timestamp.GetCurrentTime() + * # JSON Mapping + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required, though only UTC (as indicated by "Z") is presently supported. + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * In JavaScript, one can convert a Date object to this format using the + * standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) + * with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one + * can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()) + * to obtain a formatter capable of generating timestamps in this format. * * * Protobuf type {@code google.protobuf.Timestamp} diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Type.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Type.java index f46f351879..6a64265e6e 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Type.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Type.java @@ -520,6 +520,17 @@ public final class Type extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Type parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Type parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Type parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java index 7b4dae9aab..a44447dabc 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt32Value.java @@ -151,6 +151,17 @@ public final class UInt32Value extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt32Value parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java index 578a6ff7c8..16c16b53c3 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UInt64Value.java @@ -152,6 +152,17 @@ public final class UInt64Value extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.UInt64Value parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java index 148a3dd82b..78352b9053 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnknownFieldSet.java @@ -91,7 +91,7 @@ public final class UnknownFieldSet implements MessageLite { * Construct an {@code UnknownFieldSet} around the given map. The map is * expected to be immutable. */ - private UnknownFieldSet(final Map fields, + UnknownFieldSet(final Map fields, final Map fieldsDescending) { this.fields = fields; } @@ -715,7 +715,7 @@ public final class UnknownFieldSet implements MessageLite { * @see UnknownFieldSet */ public static final class Field { - private Field() {} + Field() {} /** Construct a new {@link Builder}. */ public static Builder newBuilder() { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeUtil.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeUtil.java index bb4d8d68ba..11f599cd8f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeUtil.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/UnsafeUtil.java @@ -33,19 +33,23 @@ package org.apache.hadoop.hbase.shaded.com.google.protobuf; import java.lang.reflect.Field; import java.nio.Buffer; import java.nio.ByteBuffer; +import java.nio.ByteOrder; import java.security.AccessController; import java.security.PrivilegedExceptionAction; -import sun.misc.Unsafe; +import java.util.logging.Level; +import java.util.logging.Logger; /** Utility class for working with unsafe operations. */ -// TODO(nathanmittler): Add support for Android Memory/MemoryBlock final class UnsafeUtil { + private static final Logger logger = Logger.getLogger(UnsafeUtil.class.getName()); private static final sun.misc.Unsafe UNSAFE = getUnsafe(); + private static final MemoryAccessor MEMORY_ACCESSOR = getMemoryAccessor(); private static final boolean HAS_UNSAFE_BYTEBUFFER_OPERATIONS = supportsUnsafeByteBufferOperations(); private static final boolean HAS_UNSAFE_ARRAY_OPERATIONS = supportsUnsafeArrayOperations(); + private static final boolean HAS_UNSAFE_COPY_MEMORY = supportsUnsafeCopyMemory(); private static final long ARRAY_BASE_OFFSET = byteArrayBaseOffset(); - private static final long BUFFER_ADDRESS_OFFSET = fieldOffset(field(Buffer.class, "address")); + private static final long BUFFER_ADDRESS_OFFSET = fieldOffset(bufferAddressField()); private UnsafeUtil() {} @@ -53,20 +57,16 @@ final class UnsafeUtil { return HAS_UNSAFE_ARRAY_OPERATIONS; } - static boolean hasUnsafeByteBufferOperations() { - return HAS_UNSAFE_BYTEBUFFER_OPERATIONS; + static boolean hasUnsafeCopyMemory() { + return HAS_UNSAFE_COPY_MEMORY; } - static Object allocateInstance(Class clazz) { - try { - return UNSAFE.allocateInstance(clazz); - } catch (InstantiationException e) { - throw new RuntimeException(e); - } + static boolean hasUnsafeByteBufferOperations() { + return HAS_UNSAFE_BYTEBUFFER_OPERATIONS; } static long objectFieldOffset(Field field) { - return UNSAFE.objectFieldOffset(field); + return MEMORY_ACCESSOR.objectFieldOffset(field); } static long getArrayBaseOffset() { @@ -74,103 +74,103 @@ final class UnsafeUtil { } static byte getByte(Object target, long offset) { - return UNSAFE.getByte(target, offset); + return MEMORY_ACCESSOR.getByte(target, offset); } static void putByte(Object target, long offset, byte value) { - UNSAFE.putByte(target, offset, value); + MEMORY_ACCESSOR.putByte(target, offset, value); } static int getInt(Object target, long offset) { - return UNSAFE.getInt(target, offset); + return MEMORY_ACCESSOR.getInt(target, offset); } static void putInt(Object target, long offset, int value) { - UNSAFE.putInt(target, offset, value); + MEMORY_ACCESSOR.putInt(target, offset, value); } static long getLong(Object target, long offset) { - return UNSAFE.getLong(target, offset); + return MEMORY_ACCESSOR.getLong(target, offset); } static void putLong(Object target, long offset, long value) { - UNSAFE.putLong(target, offset, value); + MEMORY_ACCESSOR.putLong(target, offset, value); } static boolean getBoolean(Object target, long offset) { - return UNSAFE.getBoolean(target, offset); + return MEMORY_ACCESSOR.getBoolean(target, offset); } static void putBoolean(Object target, long offset, boolean value) { - UNSAFE.putBoolean(target, offset, value); + MEMORY_ACCESSOR.putBoolean(target, offset, value); } static float getFloat(Object target, long offset) { - return UNSAFE.getFloat(target, offset); + return MEMORY_ACCESSOR.getFloat(target, offset); } static void putFloat(Object target, long offset, float value) { - UNSAFE.putFloat(target, offset, value); + MEMORY_ACCESSOR.putFloat(target, offset, value); } static double getDouble(Object target, long offset) { - return UNSAFE.getDouble(target, offset); + return MEMORY_ACCESSOR.getDouble(target, offset); } static void putDouble(Object target, long offset, double value) { - UNSAFE.putDouble(target, offset, value); + MEMORY_ACCESSOR.putDouble(target, offset, value); } static Object getObject(Object target, long offset) { - return UNSAFE.getObject(target, offset); + return MEMORY_ACCESSOR.getObject(target, offset); } static void putObject(Object target, long offset, Object value) { - UNSAFE.putObject(target, offset, value); + MEMORY_ACCESSOR.putObject(target, offset, value); } static void copyMemory( Object src, long srcOffset, Object target, long targetOffset, long length) { - UNSAFE.copyMemory(src, srcOffset, target, targetOffset, length); + MEMORY_ACCESSOR.copyMemory(src, srcOffset, target, targetOffset, length); } static byte getByte(long address) { - return UNSAFE.getByte(address); + return MEMORY_ACCESSOR.getByte(address); } static void putByte(long address, byte value) { - UNSAFE.putByte(address, value); + MEMORY_ACCESSOR.putByte(address, value); } static int getInt(long address) { - return UNSAFE.getInt(address); + return MEMORY_ACCESSOR.getInt(address); } static void putInt(long address, int value) { - UNSAFE.putInt(address, value); + MEMORY_ACCESSOR.putInt(address, value); } static long getLong(long address) { - return UNSAFE.getLong(address); + return MEMORY_ACCESSOR.getLong(address); } static void putLong(long address, long value) { - UNSAFE.putLong(address, value); + MEMORY_ACCESSOR.putLong(address, value); } static void copyMemory(long srcAddress, long targetAddress, long length) { - UNSAFE.copyMemory(srcAddress, targetAddress, length); - } - - static void setMemory(long address, long numBytes, byte value) { - UNSAFE.setMemory(address, numBytes, value); + MEMORY_ACCESSOR.copyMemory(srcAddress, targetAddress, length); } /** * Gets the offset of the {@code address} field of the given direct {@link ByteBuffer}. */ static long addressOffset(ByteBuffer buffer) { - return UNSAFE.getLong(buffer, BUFFER_ADDRESS_OFFSET); + return MEMORY_ACCESSOR.getLong(buffer, BUFFER_ADDRESS_OFFSET); + } + + static Object getStaticObject(Field field) { + return MEMORY_ACCESSOR.getStaticObject(field); } /** @@ -181,7 +181,7 @@ final class UnsafeUtil { try { unsafe = AccessController.doPrivileged( - new PrivilegedExceptionAction() { + new PrivilegedExceptionAction() { @Override public sun.misc.Unsafe run() throws Exception { Class k = sun.misc.Unsafe.class; @@ -204,69 +204,114 @@ final class UnsafeUtil { return unsafe; } + /** Get a {@link MemoryAccessor} appropriate for the platform, or null if not supported. */ + private static MemoryAccessor getMemoryAccessor() { + if (UNSAFE == null) { + return null; + } + return new JvmMemoryAccessor(UNSAFE); + } + /** Indicates whether or not unsafe array operations are supported on this platform. */ private static boolean supportsUnsafeArrayOperations() { - boolean supported = false; - if (UNSAFE != null) { - try { - Class clazz = UNSAFE.getClass(); - clazz.getMethod("objectFieldOffset", Field.class); - clazz.getMethod("allocateInstance", Class.class); - clazz.getMethod("arrayBaseOffset", Class.class); - clazz.getMethod("getByte", Object.class, long.class); - clazz.getMethod("putByte", Object.class, long.class, byte.class); - clazz.getMethod("getBoolean", Object.class, long.class); - clazz.getMethod("putBoolean", Object.class, long.class, boolean.class); - clazz.getMethod("getInt", Object.class, long.class); - clazz.getMethod("putInt", Object.class, long.class, int.class); - clazz.getMethod("getLong", Object.class, long.class); - clazz.getMethod("putLong", Object.class, long.class, long.class); - clazz.getMethod("getFloat", Object.class, long.class); - clazz.getMethod("putFloat", Object.class, long.class, float.class); - clazz.getMethod("getDouble", Object.class, long.class); - clazz.getMethod("putDouble", Object.class, long.class, double.class); - clazz.getMethod("getObject", Object.class, long.class); - clazz.getMethod("putObject", Object.class, long.class, Object.class); - clazz.getMethod( - "copyMemory", Object.class, long.class, Object.class, long.class, long.class); - supported = true; - } catch (Throwable e) { - // Do nothing. - } - } - return supported; + if (UNSAFE == null) { + return false; + } + try { + Class clazz = UNSAFE.getClass(); + clazz.getMethod("objectFieldOffset", Field.class); + clazz.getMethod("arrayBaseOffset", Class.class); + clazz.getMethod("getInt", Object.class, long.class); + clazz.getMethod("putInt", Object.class, long.class, int.class); + clazz.getMethod("getLong", Object.class, long.class); + clazz.getMethod("putLong", Object.class, long.class, long.class); + clazz.getMethod("getObject", Object.class, long.class); + clazz.getMethod("putObject", Object.class, long.class, Object.class); + clazz.getMethod("getByte", Object.class, long.class); + clazz.getMethod("putByte", Object.class, long.class, byte.class); + clazz.getMethod("getBoolean", Object.class, long.class); + clazz.getMethod("putBoolean", Object.class, long.class, boolean.class); + clazz.getMethod("getFloat", Object.class, long.class); + clazz.getMethod("putFloat", Object.class, long.class, float.class); + clazz.getMethod("getDouble", Object.class, long.class); + clazz.getMethod("putDouble", Object.class, long.class, double.class); + + return true; + } catch (Throwable e) { + logger.log( + Level.WARNING, + "platform method missing - proto runtime falling back to safer methods: " + e); + } + return false; + } + + /** + * Indicates whether or not unsafe copyMemory(object, long, object, long, long) operations are + * supported on this platform. + */ + private static boolean supportsUnsafeCopyMemory() { + if (UNSAFE == null) { + return false; + } + try { + Class clazz = UNSAFE.getClass(); + clazz.getMethod("copyMemory", Object.class, long.class, Object.class, long.class, long.class); + + return true; + } catch (Throwable e) { + logger.log( + Level.WARNING, + "copyMemory is missing from platform - proto runtime falling back to safer methods."); + } + return false; } private static boolean supportsUnsafeByteBufferOperations() { - boolean supported = false; - if (UNSAFE != null) { - try { - Class clazz = UNSAFE.getClass(); - // Methods for getting direct buffer address. - clazz.getMethod("objectFieldOffset", Field.class); - clazz.getMethod("getLong", Object.class, long.class); - - clazz.getMethod("getByte", long.class); - clazz.getMethod("putByte", long.class, byte.class); - clazz.getMethod("getInt", long.class); - clazz.getMethod("putInt", long.class, int.class); - clazz.getMethod("getLong", long.class); - clazz.getMethod("putLong", long.class, long.class); - clazz.getMethod("setMemory", long.class, long.class, byte.class); - clazz.getMethod("copyMemory", long.class, long.class, long.class); - supported = true; - } catch (Throwable e) { - // Do nothing. - } - } - return supported; + if (UNSAFE == null) { + return false; + } + try { + Class clazz = UNSAFE.getClass(); + // Methods for getting direct buffer address. + clazz.getMethod("objectFieldOffset", Field.class); + clazz.getMethod("getLong", Object.class, long.class); + + clazz.getMethod("getByte", long.class); + clazz.getMethod("putByte", long.class, byte.class); + clazz.getMethod("getInt", long.class); + clazz.getMethod("putInt", long.class, int.class); + clazz.getMethod("getLong", long.class); + clazz.getMethod("putLong", long.class, long.class); + clazz.getMethod("copyMemory", long.class, long.class, long.class); + return true; + } catch (Throwable e) { + logger.log( + Level.WARNING, + "platform method missing - proto runtime falling back to safer methods: " + e); + } + return false; + } + + + @SuppressWarnings("unchecked") + private static Class getClassForName(String name) { + try { + return (Class) Class.forName(name); + } catch (Throwable e) { + return null; + } + } + + /** Finds the address field within a direct {@link Buffer}. */ + private static Field bufferAddressField() { + return field(Buffer.class, "address"); } /** * Get the base offset for byte arrays, or {@code -1} if {@code sun.misc.Unsafe} is not available. */ private static int byteArrayBaseOffset() { - return HAS_UNSAFE_ARRAY_OPERATIONS ? UNSAFE.arrayBaseOffset(byte[].class) : -1; + return HAS_UNSAFE_ARRAY_OPERATIONS ? MEMORY_ACCESSOR.arrayBaseOffset(byte[].class) : -1; } /** @@ -274,7 +319,7 @@ final class UnsafeUtil { * available. */ private static long fieldOffset(Field field) { - return field == null || UNSAFE == null ? -1 : UNSAFE.objectFieldOffset(field); + return field == null || MEMORY_ACCESSOR == null ? -1 : MEMORY_ACCESSOR.objectFieldOffset(field); } /** @@ -292,4 +337,174 @@ final class UnsafeUtil { } return field; } + + private abstract static class MemoryAccessor { + + sun.misc.Unsafe unsafe; + + MemoryAccessor(sun.misc.Unsafe unsafe) { + this.unsafe = unsafe; + } + + public final long objectFieldOffset(Field field) { + return unsafe.objectFieldOffset(field); + } + + public abstract byte getByte(Object target, long offset); + + public abstract void putByte(Object target, long offset, byte value); + + public final int getInt(Object target, long offset) { + return unsafe.getInt(target, offset); + } + + public final void putInt(Object target, long offset, int value) { + unsafe.putInt(target, offset, value); + } + + public final long getLong(Object target, long offset) { + return unsafe.getLong(target, offset); + } + + public final void putLong(Object target, long offset, long value) { + unsafe.putLong(target, offset, value); + } + + public abstract boolean getBoolean(Object target, long offset); + + public abstract void putBoolean(Object target, long offset, boolean value); + + public abstract float getFloat(Object target, long offset); + + public abstract void putFloat(Object target, long offset, float value); + + public abstract double getDouble(Object target, long offset); + + public abstract void putDouble(Object target, long offset, double value); + + public final Object getObject(Object target, long offset) { + return unsafe.getObject(target, offset); + } + + public final void putObject(Object target, long offset, Object value) { + unsafe.putObject(target, offset, value); + } + + public final int arrayBaseOffset(Class clazz) { + return unsafe.arrayBaseOffset(clazz); + } + + public abstract byte getByte(long address); + + public abstract void putByte(long address, byte value); + + public abstract int getInt(long address); + + public abstract void putInt(long address, int value); + + public abstract long getLong(long address); + + public abstract void putLong(long address, long value); + + public abstract void copyMemory(long srcAddress, long targetAddress, long length); + + public abstract void copyMemory( + Object src, long srcOffset, Object target, long targetOffset, long length); + + public abstract Object getStaticObject(Field field); + } + + private static final class JvmMemoryAccessor extends MemoryAccessor { + + JvmMemoryAccessor(sun.misc.Unsafe unsafe) { + super(unsafe); + } + + @Override + public byte getByte(long address) { + return unsafe.getByte(address); + } + + @Override + public void putByte(long address, byte value) { + unsafe.putByte(address, value); + } + + @Override + public int getInt(long address) { + return unsafe.getInt(address); + } + + @Override + public void putInt(long address, int value) { + unsafe.putInt(address, value); + } + + @Override + public long getLong(long address) { + return unsafe.getLong(address); + } + + @Override + public void putLong(long address, long value) { + unsafe.putLong(address, value); + } + + @Override + public byte getByte(Object target, long offset) { + return unsafe.getByte(target, offset); + } + + @Override + public void putByte(Object target, long offset, byte value) { + unsafe.putByte(target, offset, value); + } + + @Override + public boolean getBoolean(Object target, long offset) { + return unsafe.getBoolean(target, offset); + } + + @Override + public void putBoolean(Object target, long offset, boolean value) { + unsafe.putBoolean(target, offset, value); + } + + @Override + public float getFloat(Object target, long offset) { + return unsafe.getFloat(target, offset); + } + + @Override + public void putFloat(Object target, long offset, float value) { + unsafe.putFloat(target, offset, value); + } + + @Override + public double getDouble(Object target, long offset) { + return unsafe.getDouble(target, offset); + } + + @Override + public void putDouble(Object target, long offset, double value) { + unsafe.putDouble(target, offset, value); + } + + @Override + public void copyMemory( + Object src, long srcOffset, Object target, long targetOffset, long length) { + unsafe.copyMemory(src, srcOffset, target, targetOffset, length); + } + + @Override + public void copyMemory(long srcAddress, long targetAddress, long length) { + unsafe.copyMemory(srcAddress, targetAddress, length); + } + + @Override + public Object getStaticObject(Field field) { + return getObject(unsafe.staticFieldBase(field), unsafe.staticFieldOffset(field)); + } + } + } diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java index b84efd68b3..5b2c5a85c5 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Utf8.java @@ -1523,7 +1523,7 @@ final class Utf8 { // the index (relative to the start of the array) is also 8-byte aligned. We do this by // ANDing the index with 7 to determine the number of bytes that need to be read before // we're 8-byte aligned. - final int unaligned = (int) offset & 7; + final int unaligned = 8 - ((int) offset & 7); for (int j = unaligned; j > 0; j--) { if (UnsafeUtil.getByte(bytes, offset++) < 0) { return unaligned - j; diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Value.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Value.java index 593043b948..8e6d4651b0 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Value.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/Value.java @@ -495,6 +495,17 @@ public final class Value extends } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Value parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Value parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Value parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java index 71975c2c7c..51a79ba28a 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/com/google/protobuf/compiler/PluginProtos.java @@ -389,6 +389,17 @@ public final class PluginProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.Version parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -972,6 +983,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -990,6 +1003,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1007,6 +1022,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1024,6 +1041,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1042,6 +1061,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1312,6 +1333,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1331,6 +1354,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1351,6 +1376,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1370,6 +1397,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1389,6 +1418,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -1552,6 +1583,17 @@ public final class PluginProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2126,6 +2168,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2149,6 +2193,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2172,6 +2218,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2195,6 +2243,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2225,6 +2275,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2252,6 +2304,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2281,6 +2335,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2311,6 +2367,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2338,6 +2396,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2365,6 +2425,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2393,6 +2455,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2419,6 +2483,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2445,6 +2511,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2465,6 +2533,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2488,6 +2558,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2512,6 +2584,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2532,6 +2606,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -2553,6 +2629,8 @@ public final class PluginProtos { * the entire set into memory at once. However, as of this writing, this * is not similarly optimized on protoc's end -- it will store all fields in * memory at once before sending them to the plugin. + * Type names of fields and extensions in the FileDescriptorProto are always + * fully qualified. * * * repeated .google.protobuf.FileDescriptorProto proto_file = 15; @@ -3621,6 +3699,17 @@ public final class PluginProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse.File parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse.File parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse.File parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4648,6 +4737,17 @@ public final class PluginProtos { } public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5377,8 +5477,9 @@ public final class PluginProtos { "\t\022B\n\004file\030\017 \003(\01324.google.protobuf.compil" + "er.CodeGeneratorResponse.File\032>\n\004File\022\014\n" + "\004name\030\001 \001(\t\022\027\n\017insertion_point\030\002 \001(\t\022\017\n\007" + - "content\030\017 \001(\tB7\n\034com.google.protobuf.com" + - "pilerB\014PluginProtosZ\tplugin_go" + "content\030\017 \001(\tBg\n\034com.google.protobuf.com" + + "pilerB\014PluginProtosZ9github.com/golang/p" + + "rotobuf/protoc-gen-go/plugin;plugin_go" }; org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java index 46e9c0186c..373120a154 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProcedureProtos.java @@ -221,6 +221,17 @@ public final class TestProcedureProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProcedureProtos.TestTableDDLStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java index a30032daff..7a2076f7b3 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/ipc/protobuf/generated/TestProtos.java @@ -137,6 +137,17 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyRequestProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -510,6 +521,17 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EmptyResponseProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -967,6 +989,17 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoRequestProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1518,6 +1551,17 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.EchoResponseProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2037,6 +2081,17 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.PauseRequestProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2542,6 +2597,17 @@ public final class TestProtos { } public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.ipc.protobuf.generated.TestProtos.AddrResponseProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java index e83a7aca6a..b6cbdf7784 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java @@ -606,6 +606,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1705,6 +1716,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2548,6 +2570,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3159,6 +3192,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3748,6 +3792,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UserPermission parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4569,6 +4624,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions.UserPermissions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5265,6 +5331,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.UsersAndPermissions parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6052,6 +6129,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6612,6 +6700,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7058,6 +7157,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7577,6 +7687,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8119,6 +8240,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8818,6 +8950,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9572,6 +9715,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.CheckPermissionsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10234,6 +10388,17 @@ public final class AccessControlProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.CheckPermissionsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java index 812cf3bba9..9fdead7ff0 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java @@ -257,6 +257,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1223,6 +1234,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2086,6 +2108,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2779,6 +2812,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3264,6 +3308,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3729,6 +3784,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4817,6 +4883,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5787,6 +5864,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6763,6 +6851,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7302,6 +7401,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7821,6 +7931,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8498,6 +8619,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9356,6 +9488,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9967,6 +10110,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10738,6 +10892,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11369,6 +11534,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11941,6 +12117,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.SplitRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12487,6 +12674,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13098,6 +13296,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13742,6 +13951,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest.RegionUpdateInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14533,6 +14753,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15243,6 +15474,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateFavoredNodesResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15901,6 +16143,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16946,6 +17199,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17878,6 +18142,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ReplicateWALEntryResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18251,6 +18526,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18719,6 +19005,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -19300,6 +19597,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -19767,6 +20075,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -20140,6 +20459,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -20632,6 +20962,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -21265,6 +21606,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -21784,6 +22136,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -22157,6 +22520,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -22601,6 +22975,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -23211,6 +23596,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -23952,6 +24348,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -24437,6 +24844,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -24993,6 +25411,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26119,6 +26548,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -27324,6 +27764,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28090,6 +28541,17 @@ public final class AdminProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java index 0271777585..00d35c4b98 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java @@ -350,6 +350,17 @@ public final class BackupProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1082,6 +1093,17 @@ public final class BackupProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2475,6 +2497,17 @@ public final class BackupProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4173,6 +4206,17 @@ public final class BackupProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupTableInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupTableInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupTableInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5748,6 +5792,17 @@ public final class BackupProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java index e1d71218a1..7a9f5f2d5f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/CellProtos.java @@ -612,6 +612,17 @@ public final class CellProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.Cell parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1649,6 +1660,17 @@ public final class CellProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.CellProtos.KeyValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java index b93f6cc7e0..39ef8663ca 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClientProtos.java @@ -316,6 +316,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Authorizations parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -895,6 +906,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CellVisibility parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1489,6 +1511,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Column parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2838,6 +2871,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Get parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5162,6 +5206,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Result parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6328,6 +6383,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7059,6 +7125,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7858,6 +7935,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Condition parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9581,6 +9669,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10191,6 +10290,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11195,6 +11305,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12899,6 +13020,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13880,6 +14012,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15878,6 +16021,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Scan parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18800,6 +18954,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -19908,6 +20073,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -21356,6 +21532,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -22985,7 +23172,7 @@ public final class ClientProtos { * optional .hbase.pb.Cursor cursor = 12; */ private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< - org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CursorOrBuilder> + org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Cursor.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CursorOrBuilder> getCursorFieldBuilder() { if (cursorBuilder_ == null) { cursorBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< @@ -23519,6 +23706,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -24222,6 +24420,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -25392,6 +25601,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26063,6 +26283,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.DelegationToken parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26844,6 +27075,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -27588,6 +27830,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28208,6 +28461,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28816,6 +29080,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -29455,6 +29730,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30177,6 +30463,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30839,6 +31136,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -31644,6 +31952,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -32584,6 +32903,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.Action parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -33681,6 +34011,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -34745,6 +35086,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -35478,6 +35830,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRegionLoadStats parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -36809,6 +37172,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -38071,6 +38445,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -39135,6 +39520,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -40215,6 +40611,17 @@ public final class ClientProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -41831,7 +42238,7 @@ public final class ClientProtos { internal_static_hbase_pb_ScanRequest_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor internal_static_hbase_pb_Cursor_descriptor; - private static final + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_hbase_pb_Cursor_fieldAccessorTable; private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java index 83f3281fa8..6287297482 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterIdProtos.java @@ -251,6 +251,17 @@ public final class ClusterIdProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos.ClusterId parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java index 4f67ecf25b..9430875526 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java @@ -642,6 +642,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1394,6 +1405,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2163,6 +2185,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2786,6 +2819,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStoreSequenceIds parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4792,6 +4836,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6779,6 +6834,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSink parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7531,6 +7597,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ReplicationLoadSource parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9174,6 +9251,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ServerLoad parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11457,6 +11545,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12829,6 +12928,17 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java index dd3faed874..a80a40f2ed 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ComparatorProtos.java @@ -267,6 +267,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.Comparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -826,6 +837,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.ByteArrayComparable parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1316,6 +1338,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1901,6 +1934,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.LongComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2486,6 +2530,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BinaryPrefixComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3226,6 +3281,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.BitComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3790,6 +3856,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.NullComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4457,6 +4534,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.RegexStringComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5229,6 +5317,17 @@ public final class ComparatorProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos.SubstringComparator parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java index eb48984d27..ee6566834f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/EncryptionProtos.java @@ -413,6 +413,17 @@ public final class EncryptionProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.EncryptionProtos.WrappedKey parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java index 64e439adc2..20496e3764 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ErrorHandlingProtos.java @@ -425,6 +425,17 @@ public final class ErrorHandlingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1407,6 +1418,17 @@ public final class ErrorHandlingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2441,6 +2463,17 @@ public final class ErrorHandlingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java index d34c1103bb..b0860853e0 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FSProtos.java @@ -226,6 +226,17 @@ public final class FSProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -901,6 +912,17 @@ public final class FSProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FSProtos.Reference parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java index a462d04a33..53291b25bc 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/FilterProtos.java @@ -267,6 +267,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.Filter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -830,6 +841,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1395,6 +1417,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1953,6 +1986,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2565,6 +2609,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3239,6 +3294,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.CompareFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4017,6 +4083,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4738,6 +4815,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FamilyFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5496,6 +5584,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterList parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6281,6 +6380,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterWrapper parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6800,6 +6910,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7239,6 +7360,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7795,6 +7927,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8505,6 +8648,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8979,6 +9133,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9466,6 +9631,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9983,6 +10159,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PageFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10452,6 +10639,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.PrefixFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10946,6 +11144,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.QualifierFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11520,6 +11729,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RandomRowFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12014,6 +12234,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12606,6 +12837,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13440,6 +13682,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14250,6 +14503,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.SkipFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14907,6 +15171,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.TimestampsFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15481,6 +15756,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.ValueFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16073,6 +16359,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16592,6 +16889,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.FilterAllFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17153,6 +17461,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.RowRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17786,6 +18105,17 @@ public final class FilterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java index b3b0831934..43148fba04 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java @@ -518,6 +518,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1344,6 +1355,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2919,6 +2941,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3604,6 +3637,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4969,6 +5013,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5845,6 +5900,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6734,6 +6800,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7318,6 +7395,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7908,6 +7996,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8657,6 +8756,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9295,6 +9405,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9928,6 +10049,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10615,6 +10747,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11228,6 +11371,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11826,6 +11980,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12684,6 +12849,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13619,6 +13795,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.EmptyMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14045,6 +14232,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.LongMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14521,6 +14719,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.DoubleMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14994,6 +15203,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BigDecimalMsg parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15522,6 +15742,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16130,6 +16361,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NamespaceDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17431,6 +17673,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18556,6 +18809,17 @@ public final class HBaseProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java index 1de62a2f67..546c7d436d 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HFileProtos.java @@ -233,6 +233,17 @@ public final class HFileProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileInfoProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1542,6 +1553,17 @@ public final class HFileProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos.FileTrailerProto parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java index fd83795864..567e18ecfb 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LoadBalancerProtos.java @@ -186,6 +186,17 @@ public final class LoadBalancerProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java index 99853a5687..df07b8e84e 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/LockServiceProtos.java @@ -803,6 +803,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1965,6 +1976,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2486,6 +2508,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3158,6 +3191,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4061,6 +4105,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockProcedureData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5258,6 +5313,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.WaitingProcedure parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6223,6 +6289,17 @@ public final class LockServiceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java index cc06b3b45d..d715911496 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MapReduceProtos.java @@ -223,6 +223,17 @@ public final class MapReduceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1098,6 +1109,17 @@ public final class MapReduceProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java index 0ec9b22953..7f4e4aca29 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java @@ -3089,6 +3089,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateTableStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4301,6 +4312,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5500,6 +5522,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.TruncateTableStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6867,6 +6900,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteTableStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7888,6 +7932,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CreateNamespaceStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8549,6 +8604,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyNamespaceStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9362,6 +9428,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteNamespaceStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10254,6 +10331,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11482,6 +11570,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12689,6 +12788,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13733,6 +13843,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.EnableTableStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14633,6 +14754,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DisableTableStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15586,6 +15718,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreParentToChildRegionsPair parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16630,6 +16773,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.CloneSnapshotStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18586,6 +18740,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RestoreSnapshotStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -20819,6 +20984,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.DispatchMergingRegionsStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -22043,6 +22219,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.SplitTableRegionStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -23271,6 +23458,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MergeTableRegionsStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MergeTableRegionsStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MergeTableRegionsStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -24655,6 +24853,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26116,6 +26325,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AssignRegionStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AssignRegionStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AssignRegionStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -27239,6 +27459,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.UnassignRegionStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28432,6 +28663,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MoveRegionStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -29307,6 +29549,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCRegionStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCRegionStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCRegionStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30041,6 +30294,17 @@ public final class MasterProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCMergedRegionsStateData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCMergedRegionsStateData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCMergedRegionsStateData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java index 5ea20446d3..a7e4598f18 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java @@ -465,6 +465,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1257,6 +1268,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1892,6 +1914,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2589,6 +2622,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3245,6 +3289,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4037,6 +4092,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4597,6 +4663,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5257,6 +5334,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5920,6 +6008,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6758,6 +6857,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7415,6 +7525,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8256,6 +8377,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MergeTableRegionsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8747,6 +8879,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9266,6 +9409,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9759,6 +9913,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10319,6 +10484,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10765,6 +10941,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11284,6 +11471,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11874,6 +12072,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12571,6 +12780,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SplitTableRegionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13221,6 +13441,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13960,6 +14191,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -14545,6 +14787,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15195,6 +15448,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -15827,6 +16091,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -16518,6 +16793,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17103,6 +17389,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -17753,6 +18050,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18338,6 +18646,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -18988,6 +19307,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -19644,6 +19974,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -20436,6 +20777,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -21021,6 +21373,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -21671,6 +22034,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -22267,6 +22641,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -22865,6 +23250,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -23450,6 +23846,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -24100,6 +24507,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -24602,6 +25020,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -25142,6 +25571,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -25661,6 +26101,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26126,6 +26577,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -26872,6 +27334,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -27431,6 +27904,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28177,6 +28661,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -28736,6 +29231,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -29398,6 +29904,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -29771,6 +30288,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30144,6 +30672,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30517,6 +31056,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -30890,6 +31440,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -31316,6 +31877,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -31786,6 +32358,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -32257,6 +32840,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.BalanceResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -32778,6 +33372,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -33289,6 +33894,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -33707,6 +34323,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -34133,6 +34760,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -34752,6 +35390,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -35381,6 +36030,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetSplitOrMergeEnabledResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -35897,6 +36557,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -36375,6 +37046,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSplitOrMergeEnabledResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -36796,6 +37478,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -37222,6 +37915,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -37696,6 +38400,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -38166,6 +38881,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -38584,6 +39310,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -39010,6 +39747,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -39431,6 +40179,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -39868,6 +40627,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -40355,6 +41125,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -40825,6 +41606,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -41243,6 +42035,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -41669,6 +42472,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -42090,6 +42904,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -42516,6 +43341,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RunCleanerChoreResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -42990,6 +43826,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -43460,6 +44307,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetCleanerChoreRunningResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -43878,6 +44736,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -44304,6 +45173,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsCleanerChoreEnabledResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -44798,6 +45678,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -45370,6 +46261,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -45791,6 +46693,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -46256,6 +47169,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -46991,6 +47915,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -47510,6 +48445,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -48097,6 +49043,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -48792,6 +49749,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -49289,6 +50257,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -49930,6 +50909,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -50560,6 +51550,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -51127,6 +52128,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -51618,6 +52630,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -52231,6 +53254,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -52986,6 +54020,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -53957,6 +55002,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -54824,6 +55880,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -55508,6 +56575,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -56243,6 +57321,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -56835,6 +57924,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableStateResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -57354,6 +58454,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -57800,6 +58911,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -58319,6 +59441,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -58745,6 +59878,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -59239,6 +60383,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -59853,6 +61008,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -60386,6 +61552,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -61022,6 +62199,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -61634,6 +62822,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -62415,6 +63614,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -63202,6 +64412,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -63717,6 +64938,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -64138,6 +65370,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -64603,6 +65846,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -65265,6 +66519,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -65730,6 +66995,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListLocksResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -66923,6 +68199,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -68061,6 +69348,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -68507,6 +69805,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -69099,6 +70408,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -69671,6 +70991,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -70092,6 +71423,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -70681,6 +72023,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -71147,6 +72500,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -71612,6 +72976,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListDrainingRegionServersResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -72366,6 +73741,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -73028,6 +74414,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DrainRegionServersResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -73493,6 +74890,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -74155,6 +75563,17 @@ public final class MasterProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RemoveDrainFromRegionServersResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java index e6fe9b109d..67849ace04 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ProcedureProtos.java @@ -1276,6 +1276,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.Procedure parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2712,6 +2723,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.SequentialProcedureData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3222,6 +3244,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.StateMachineProcedureData parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3894,6 +3927,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4560,6 +4604,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALTrailer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5346,6 +5401,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker.TrackerNode parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5918,6 +5984,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureStoreTracker parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6980,6 +7057,17 @@ public final class ProcedureProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureWALEntry parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java index 1ccf488b16..bc9a28146f 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java @@ -793,6 +793,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1761,6 +1772,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Throttle parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3107,6 +3129,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3851,6 +3884,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4546,6 +4590,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.QuotaUsage parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5119,6 +5174,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5744,6 +5810,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6367,6 +6444,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaStatus parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6999,6 +7087,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7599,6 +7698,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8197,6 +8307,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse.RegionSizes parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8741,6 +8862,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaRegionSizesResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9403,6 +9535,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10021,6 +10164,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10664,6 +10818,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11326,6 +11491,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11977,6 +12153,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.TableQuotaSnapshot parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12774,6 +12961,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse.NamespaceQuotaSnapshot parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -13407,6 +13605,17 @@ public final class QuotaProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetQuotaStatesResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java index a7adb06abe..d7434545b0 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RPCProtos.java @@ -303,6 +303,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1401,6 +1412,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2595,6 +2617,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3221,6 +3254,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CellBlockMeta parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4087,6 +4131,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5179,6 +5234,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.CryptoCipherMeta parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6298,6 +6364,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7512,6 +7589,17 @@ public final class RPCProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java index 2421f0af71..7564d3ae13 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionNormalizerProtos.java @@ -186,6 +186,17 @@ public final class RegionNormalizerProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos.RegionNormalizerState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java index 299b55e863..e86b5a0cc2 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java @@ -441,6 +441,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1271,6 +1282,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerStartupResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2225,6 +2247,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2921,6 +2954,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionServerReportResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3497,6 +3541,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4166,6 +4221,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRSFatalErrorResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4607,6 +4673,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5247,6 +5324,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.GetLastFlushedSequenceIdResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6482,6 +6570,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7511,6 +7610,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8457,6 +8567,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9103,6 +9224,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9811,6 +9943,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10478,6 +10621,17 @@ public final class RegionServerStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionSpaceUseReportResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ReplicationProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ReplicationProtos.java index 4052c12c0d..4469f65017 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ReplicationProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ReplicationProtos.java @@ -273,6 +273,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TableCF parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TableCF parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.TableCF parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1462,6 +1473,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationPeer parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationPeer parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationPeer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3195,6 +3217,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3856,6 +3889,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationPeerDescription parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationPeerDescription parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationPeerDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4670,6 +4714,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationHLogPosition parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationHLogPosition parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ReplicationHLogPosition parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5251,6 +5306,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5860,6 +5926,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.AddReplicationPeerResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6317,6 +6394,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6784,6 +6872,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.RemoveReplicationPeerResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7241,6 +7340,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7708,6 +7818,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.EnableReplicationPeerResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8165,6 +8286,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8632,6 +8764,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.DisableReplicationPeerResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9089,6 +9232,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9711,6 +9865,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.GetReplicationPeerConfigResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10475,6 +10640,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11084,6 +11260,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.UpdateReplicationPeerConfigResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11537,6 +11724,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersRequest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -12093,6 +12291,17 @@ public final class ReplicationProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.ListReplicationPeersResponse parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java index 4d5953c2cf..f66cf5f18b 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java @@ -719,6 +719,17 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2039,6 +2050,17 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3089,6 +3111,17 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3897,6 +3930,17 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4670,6 +4714,17 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -5681,6 +5736,17 @@ public final class SnapshotProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDataManifest parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java index f4897f3658..8be1f62a58 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/TracingProtos.java @@ -241,6 +241,17 @@ public final class TracingProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.TracingProtos.RPCTInfo parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java index 048f4b1512..2fbb62027b 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java @@ -534,6 +534,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1950,6 +1961,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3654,6 +3676,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4658,6 +4691,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -6215,6 +6259,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -7061,6 +7116,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -8304,6 +8370,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -9315,6 +9392,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -10763,6 +10851,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -11860,6 +11959,17 @@ public final class WALProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALTrailer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java index f685c6b6e6..b9286d0b0c 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ZooKeeperProtos.java @@ -383,6 +383,17 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -1288,6 +1299,17 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.Master parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2065,6 +2087,17 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -2970,6 +3003,17 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -3788,6 +3832,17 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.DeprecatedTableState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); @@ -4297,6 +4352,17 @@ public final class ZooKeeperProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( + java.nio.ByteBuffer data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( + java.nio.ByteBuffer data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SwitchState parseFrom( org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml index 1714b51acf..bde90f8021 100644 --- a/hbase-rest/pom.xml +++ b/hbase-rest/pom.xml @@ -237,8 +237,8 @@ - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous com.google.protobuf @@ -369,6 +369,12 @@ org.apache.hadoop hadoop-mapreduce-client-core test + + + com.google.guava + guava + + org.apache.hadoop @@ -397,6 +403,12 @@ org.apache.hadoop hadoop-mapreduce-client-core test + + + com.google.guava + guava + + org.apache.hadoop diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java index ba646c21d9..5c43287fb4 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.util.StringUtils; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.eclipse.jetty.http.HttpVersion; import org.eclipse.jetty.server.Server; diff --git a/hbase-rsgroup/pom.xml b/hbase-rsgroup/pom.xml index c8220becb1..1aef4c19e4 100644 --- a/hbase-rsgroup/pom.xml +++ b/hbase-rsgroup/pom.xml @@ -145,8 +145,8 @@ commons-logging - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous com.google.protobuf diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java index 9a45e6ed9d..d8118746e1 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminClient.java @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RSGroupAdmi import org.apache.hadoop.hbase.protobuf.generated.RSGroupAdminProtos.RemoveRSGroupRequest; import org.apache.hadoop.hbase.protobuf.generated.RSGroupProtos; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import com.google.protobuf.ServiceException; /** diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java index fd93b3bf92..91d31d0530 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.HashSet; import java.util.Set; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; import com.google.protobuf.Service; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java index 430c8fc504..b9c19227bb 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java @@ -49,8 +49,8 @@ import org.apache.hadoop.hbase.master.locking.LockManager; import org.apache.hadoop.hbase.master.locking.LockProcedure; import org.apache.hadoop.hbase.net.Address; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; /** * Service to support Region Server Grouping (HBase-6721). diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java index 24baa62b98..bc36bc2cb0 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hbase.rsgroup; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.LinkedListMultimap; -import com.google.common.collect.ListMultimap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.LinkedListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java index f2c6118cdb..8cb56061e7 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java @@ -81,9 +81,9 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import com.google.protobuf.ServiceException; /** diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java index 9dd2c5c052..fa170febf2 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.master.balancer; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java index 264ea39270..c58dc9db24 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java @@ -55,7 +55,7 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Ignore // TODO: Fix after HBASE-14614 goes in. @Category({MediumTests.class}) diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java index a99c24f9e4..367bb98c3f 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java @@ -57,8 +57,8 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.junit.rules.TestName; public abstract class TestRSGroupsBase { diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java index 8b200abbaa..691f8d35a0 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsOfflineMode.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.rsgroup; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseCluster; diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java index 2e8911005c..37cdd3553a 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/VerifyingRSGroupAdminClient.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.rsgroup; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/pom.xml b/hbase-server/pom.xml index c4c6f1d06a..7452af5673 100644 --- a/hbase-server/pom.xml +++ b/hbase-server/pom.xml @@ -475,8 +475,8 @@ metrics-core - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous com.google.protobuf @@ -568,6 +568,17 @@ httpcore test + + org.apache.commons + commons-crypto + ${commons-crypto.version} + + + net.java.dev.jna + jna + + + @@ -681,31 +692,67 @@ org.apache.hadoop hadoop-client + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-core + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-jobclient test-jar test + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs test-jar test + + + com.google.guava + guava + + org.apache.hadoop hadoop-minicluster test + + + com.google.guava + guava + + @@ -767,6 +814,12 @@ org.apache.hadoop hadoop-minicluster + + + com.google.guava + guava + + diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java index 6ae9637217..e1f92ec605 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ZKNamespaceManager.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java index 8e6e843e6a..ed554ad35e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/BackupHFileCleaner.java @@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Predicate; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * Implementation of a file cleaner that checks if an hfile is still referenced by backup before diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java index 36a910558f..af1c0c9863 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java @@ -42,10 +42,10 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.io.MultipleIOException; -import com.google.common.base.Function; -import com.google.common.base.Preconditions; -import com.google.common.collect.Collections2; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Utility class to handle the removal of HFiles (or the respective {@link StoreFile StoreFiles}) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java index d1ee8e13dd..6e35d92740 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java @@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Private public class BackupAdminImpl implements BackupAdmin { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java index 2a5c9595e7..aa15fbaf5c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupCommands.java @@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * General backup commands, options and usage messages diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java index a929700a98..bf80506d35 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupManager.java @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.procedure.ProcedureManagerHost; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Handles backup requests, creates backup info records in backup system table to diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/TableBackupClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/TableBackupClient.java index 96486ad528..6eec46082f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/TableBackupClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/TableBackupClient.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Base class for backup operation. Concrete implementation for diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java index 604e5021f5..e254f1dd76 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/HFileSplitterJob.java @@ -131,7 +131,7 @@ public class HFileSplitterJob extends Configured implements Tool { LOG.debug("success configuring load incremental job"); TableMapReduceUtil.addDependencyJars(job.getConfiguration(), - com.google.common.base.Preconditions.class); + org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.class); } else { throw new IOException("No bulk output directory specified"); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java index e1dc7f9780..297371a66b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockR import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService; import org.apache.hadoop.hbase.util.Threads; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Lock for HBase Entity either a Table, a Namespace, or Regions. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/LockServiceClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/LockServiceClient.java index e890afd2ac..dd6a6c55af 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/LockServiceClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/LockServiceClient.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.client.locking; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.List; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.java index 343a1b0e8a..af7533651c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogManagerCoordination.java @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective; import org.apache.hadoop.hbase.master.SplitLogManager.Task; import org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Coordination for SplitLogManager. It creates and works with tasks for split log operations
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.java index 2afe4e0a16..5b26c494dc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coordination/SplitLogWorkerCoordination.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.SplitLogWorker; import org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Coordinated operations for {@link SplitLogWorker} and diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetricsCoprocessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetricsCoprocessor.java index d564002b97..f07742fdc9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetricsCoprocessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MetricsCoprocessor.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.metrics.MetricRegistries; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.metrics.MetricRegistryInfo; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Utility class for tracking metrics for various types of coprocessors. Each coprocessor instance diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java index ca68de20f6..da78a09890 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.coprocessor; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import java.io.IOException; import java.util.Collections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java index df7653fd0c..d1dd6a1eed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java @@ -37,10 +37,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.monitoring.ThreadMonitoring; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * This is a generic executor service. This component abstracts a diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java index bdec8dd974..f20c876f08 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeAssignmentHelper.java @@ -31,7 +31,7 @@ import java.util.Map.Entry; import java.util.Random; import java.util.Set; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -54,8 +54,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.FavoredNode import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * Helper class for {@link FavoredNodeLoadBalancer} that has all the intelligence for racks, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java index 00a29b206f..8b57ce6f5e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodeLoadBalancer.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.master.*; import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * An implementation of the {@link org.apache.hadoop.hbase.master.LoadBalancer} that diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java index be4aad5e7e..73f1c6dc41 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/FavoredNodesManager.java @@ -46,9 +46,9 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.net.NetUtils; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * FavoredNodesManager is responsible for maintaining favored nodes info in internal cache and diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java index 095ee29485..c62b0a0b95 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.favored; -import com.google.common.net.HostAndPort; +import org.apache.hadoop.hbase.shaded.com.google.common.net.HostAndPort; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.util.Addressing; @@ -46,7 +46,7 @@ class StartcodeAgnosticServerName extends ServerName { } public static StartcodeAgnosticServerName valueOf(final HostAndPort hostnameAndPort, long startcode) { - return new StartcodeAgnosticServerName(hostnameAndPort.getHostText(), + return new StartcodeAgnosticServerName(hostnameAndPort.getHost(), hostnameAndPort.getPort(), startcode); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index c7e11537ce..c9561e8c64 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -90,8 +90,8 @@ import org.eclipse.jetty.webapp.WebAppContext; import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.servlet.ServletContainer; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Create a Jetty embedded server to answer http requests. The primary goal diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java index fb6ebeb5ba..6ae06486a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java @@ -32,7 +32,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.fs.HFileSystem; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Wrapper for input stream(s) that takes care of the interaction of FS and HBase checksums, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java index cb0b2096a1..a6eeb0ea1c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/MetricsIO.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.io; import org.apache.hadoop.hbase.CompatibilitySingletonFactory; import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactory; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; public class MetricsIO { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java index 7fe86bea2c..bf9937ef16 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.io.asyncfs; -import com.google.common.base.Throwables; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import io.netty.channel.EventLoop; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java index c64cdf7440..77e793c639 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutput.java @@ -71,7 +71,7 @@ import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status; import org.apache.hadoop.util.DataChecksum; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * An asynchronous HDFS output stream implementation which fans out data to datanode and only diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java index 3eaacc42f7..2c8da06733 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java @@ -28,8 +28,8 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT; import static org.apache.hadoop.hdfs.protocol.datatransfer.BlockConstructionStage.PIPELINE_SETUP_CREATE; -import com.google.common.base.Throwables; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import com.google.protobuf.CodedOutputStream; import io.netty.bootstrap.Bootstrap; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java index 9bf869592e..c1e4c242bd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.io.asyncfs; import static io.netty.handler.timeout.IdleState.READER_IDLE; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY; -import com.google.common.base.Charsets; -import com.google.common.base.Throwables; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import com.google.protobuf.CodedOutputStream; import io.netty.buffer.ByteBuf; @@ -85,7 +85,7 @@ import org.apache.hadoop.crypto.Encryptor; import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.util.ByteStringer; +import com.google.protobuf.ByteString; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; @@ -365,7 +365,9 @@ public final class FanOutOneBlockAsyncDFSOutputSaslHelper { DataTransferEncryptorMessageProto.newBuilder(); builder.setStatus(DataTransferEncryptorStatus.SUCCESS); if (payload != null) { - builder.setPayload(ByteStringer.wrap(payload)); + // Was ByteStringer; fix w/o using ByteStringer. Its in hbase-protocol + // and we want to keep that out of hbase-server. + builder.setPayload(ByteString.copyFrom(payload)); } if (options != null) { builder.addAllCipherOption(PB_HELPER.convertCipherOptions(options)); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java index d5140037a3..aefaea9e2a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java index 4ceda39cc0..3efd3fe784 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index feddc2cba8..ad1ce9fcf0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -69,8 +69,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.io.Writable; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * File format for hbase. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index 72f96a5a8c..51e08525dd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.util.ChecksumType; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.IOUtils; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Reads {@link HFile} version 2 blocks to HFiles and via {@link Cacheable} Interface to caches. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java index d310a68448..bf7f23fd1c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderImpl.java @@ -60,7 +60,7 @@ import org.apache.hadoop.io.WritableUtils; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Implementation that can handle all hfile versions of {@link HFile.Reader}. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index 3cced66631..c1870221ac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -47,9 +47,10 @@ import org.apache.hadoop.hbase.util.HasThread; import org.apache.hadoop.util.StringUtils; import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Objects; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * A block cache implementation that is memory-aware using {@link HeapSize}, @@ -722,7 +723,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { @Override public String toString() { - return Objects.toStringHelper(this) + return MoreObjects.toStringHelper(this) .add("blockCount", getBlockCount()) .add("currentSize", getCurrentSize()) .add("freeSize", getFreeSize()) @@ -807,7 +808,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { @Override public String toString() { - return Objects.toStringHelper(this) + return MoreObjects.toStringHelper(this) .add("name", name) .add("totalSize", StringUtils.byteDesc(totalSize)) .add("bucketSize", StringUtils.byteDesc(bucketSize)) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java index 0b28d72d42..6384beadf5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlockQueue.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.io.hfile; -import com.google.common.collect.MinMaxPriorityQueue; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.HeapSize; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java index d9ee64c891..5c8fa1ba56 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java @@ -29,7 +29,7 @@ import java.util.Queue; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -import com.google.common.collect.MinMaxPriorityQueue; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; import org.apache.commons.collections.map.LinkedMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry; import org.codehaus.jackson.annotate.JsonIgnoreProperties; -import com.google.common.base.Objects; -import com.google.common.base.Preconditions; -import com.google.common.primitives.Ints; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints; /** * This class is used to allocate a block with specified size and free the block @@ -268,7 +268,7 @@ public final class BucketAllocator { @Override public String toString() { - return Objects.toStringHelper(this.getClass()) + return MoreObjects.toStringHelper(this.getClass()) .add("sizeIndex", sizeIndex) .add("bucketSize", bucketSizes[sizeIndex]) .toString(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java index 3c27f14eff..1017890c8a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java @@ -75,8 +75,8 @@ import org.apache.hadoop.hbase.util.IdReadWriteLock; import org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * BucketCache uses {@link BucketAllocator} to allocate/free blocks, and uses diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java index 0e33a569f5..8f478d487f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.io.hfile.BlockCacheKey; import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry; -import com.google.common.collect.MinMaxPriorityQueue; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; /** * A memory-bound queue that will grow until an element brings total size larger diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java index 313535dfdb..301f0e79a7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.util.BoundedPriorityBlockingQueue; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; -import com.google.common.base.Preconditions; -import com.google.common.base.Strings; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; /** * Runs the CallRunners passed here via {@link #dispatch(CallRunner)}. Subclass and add particular diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 0a3db40945..dad00f6efe 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.ipc; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.net.InetAddress; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java index a8479314db..337c3aeb0a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.regionserver.RSRpcServices; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.security.authorize.PolicyProvider; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java index 040209bbe9..9bdf6f81e3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcServer.java @@ -57,7 +57,7 @@ import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * The RPC server with native java NIO implementation deriving from Hadoop to diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/Driver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/Driver.java index c23fc84e5c..618c14aed6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/Driver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/Driver.java @@ -23,7 +23,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.util.ProgramDriver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Driver for hbase mapreduce jobs. Select which to run by passing name of job diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java index 9a8911e4a7..078033e811 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java @@ -48,7 +48,7 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java index f847608846..f69b738d86 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java @@ -85,7 +85,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Writes HFiles. Passed Cells must arrive in order. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java index 2834f86898..dfac471579 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java @@ -57,9 +57,9 @@ import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.common.base.Charsets; -import com.google.common.base.Throwables; -import com.google.common.collect.Ordering; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering; public class HashTable extends Configured implements Tool { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index 3c72c2bfaa..4ff527f34b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -642,7 +642,7 @@ public class Import extends Configured implements Tool { job.setPartitionerClass(KeyValueWritableComparablePartitioner.class); job.setNumReduceTasks(regionLocator.getStartKeys().length); TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), - com.google.common.base.Preconditions.class); + org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.class); } } else if (hfileOutPath != null) { LOG.info("writing to hfiles for bulk load."); @@ -657,7 +657,7 @@ public class Import extends Configured implements Tool { job.setMapOutputValueClass(KeyValue.class); HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator); TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), - com.google.common.base.Preconditions.class); + org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.class); } } else { LOG.info("writing directly to table from Mapper."); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java index aa7b12987a..3dc4369fbc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java @@ -60,9 +60,9 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.common.base.Preconditions; -import com.google.common.base.Splitter; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Splitter; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Tool to import data from a TSV file. @@ -586,7 +586,7 @@ public class ImportTsv extends Configured implements Tool { } TableMapReduceUtil.addDependencyJars(job); TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), - com.google.common.base.Function.class /* Guava used by TsvParser */); + org.apache.hadoop.hbase.shaded.com.google.common.base.Function.class /* Guava used by TsvParser */); } } return job; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java index 7d0216a00b..a6a2bfd68a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/JarFinder.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.BufferedOutputStream; import java.io.File; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index 3af42905f1..5a8e8a890e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.mapreduce; import static java.lang.String.format; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; -import com.google.common.collect.Multimaps; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimaps; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java index fdcf30ea50..bdc3d5bee2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HTableDescriptor; @@ -31,6 +30,7 @@ import org.apache.hadoop.mapreduce.Job; import java.io.IOException; import java.nio.charset.Charset; import java.util.List; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Create 3 level tree directory, first level is using table name as parent diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java index 6ba8138e2f..0f07a58e35 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java index 5c46f2ac95..4331c0f5a0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormatImpl.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java index 954194efa2..c72a0c3ca5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.util.Iterator; +import java.util.Collections; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -50,8 +51,8 @@ import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.common.base.Throwables; -import com.google.common.collect.Iterators; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; public class SyncTable extends Configured implements Tool { @@ -302,7 +303,7 @@ public class SyncTable extends Configured implements Tool { } private static final CellScanner EMPTY_CELL_SCANNER - = new CellScanner(Iterators.emptyIterator()); + = new CellScanner(Collections.emptyIterator()); /** * Rescan the given range directly from the source and target tables. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index 7c18537436..61244ccca9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -794,7 +794,7 @@ public class TableMapReduceUtil { org.apache.zookeeper.ZooKeeper.class, io.netty.channel.Channel.class, com.google.protobuf.Message.class, - com.google.common.collect.Lists.class, + org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists.class, org.apache.htrace.Trace.class, com.codahale.metrics.MetricRegistry.class); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java index e6467278ad..9a1c98e018 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java @@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Iterate over an HBase table data, return (ImmutableBytesWritable, Result) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java index a5f699d0d2..7e59c3bb91 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java index 1a268d0fcf..2f6955ed7f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormatImpl.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java index 06e43e73a0..e35c50a255 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java @@ -310,7 +310,7 @@ public class WALPlayer extends Configured implements Tool { HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator); } TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), - com.google.common.base.Preconditions.class); + org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.class); } else { // output to live cluster job.setMapperClass(WALMapper.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java index 3d32edd139..8bb266e15e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java @@ -65,7 +65,7 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This map-only job compares the data from a local table with a remote one. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java index 4775a0ad55..ba92c765e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.PairOfSameType; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Triple; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * A janitor for the catalog tables. Scans the hbase:meta catalog diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index aa0d80368f..84bd03378b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -197,9 +197,9 @@ import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.webapp.WebAppContext; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import com.google.protobuf.Descriptors; import com.google.protobuf.Service; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java index 4d7d561c09..1e69852d2e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.replication.ReplicationException; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.ReplicationPeerDescription; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import com.google.protobuf.Service; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java index 928702e427..c1e39fdc85 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterWalManager.java @@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.apache.hadoop.hbase.wal.WALSplitter; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This class abstracts a bunch of operations the HMaster needs diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java index 6ae9f0fabc..96f10b6de0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java @@ -82,7 +82,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.zookeeper.KeeperException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * The ServerManager class manages info about region servers. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java index 2fc2bbbc67..7e35fe8d1d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java @@ -24,7 +24,7 @@ import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.F import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.IN_PROGRESS; import static org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus.SUCCESS; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java index 4a2c942e07..3a11e23c80 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableNamespaceManager.java @@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * This is a helper class used internally to manage the namespace metadata that is stored in diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java index dfc43219f6..3b13b871d2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java @@ -24,7 +24,7 @@ import java.util.Set; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import org.apache.commons.logging.Log; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java index a7fb743e20..3e5b91f6a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java @@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Threads; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * The AssignmentManager is the coordinator for region assign/unassign operations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java index 3874232b7c..29d0676185 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/GCRegionProcedure.java @@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.GCRegionState; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * GC a Region that is no longer in use. It has been split or merged away. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java index 83d5506e1d..193a746eb9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java @@ -65,7 +65,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * The procedure to Merge a region in a table. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java index 21e0d9c205..627eb57c01 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStateStore.java @@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.zookeeper.KeeperException; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Store Region State to hbase:meta table. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java index 32be134287..df55c94b19 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionStates.java @@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.procedure2.ProcedureEvent; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * RegionStates contains a set of Maps that describes the in-memory state of the AM, with diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java index 219b67bd93..55b7a6006f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.master.assignment; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java index fda9f60ef9..785f6d51f5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java @@ -55,11 +55,11 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.T import org.apache.hadoop.hbase.security.access.AccessControlLists; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Joiner; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * The base class for load balancers. It provides the the functions used to by diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java index a8e22cec25..30cf16a7e5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/FavoredStochasticBalancer.java @@ -47,9 +47,9 @@ import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * An implementation of the {@link org.apache.hadoop.hbase.master.LoadBalancer} that diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java index 907e7455b0..d9a1ab8b9a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java @@ -43,15 +43,15 @@ import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.Lists; -import com.google.common.util.concurrent.Futures; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Futures; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListeningExecutorService; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.MoreExecutors; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * This will find where data for a region is located in HDFS. It ranks diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java index 818156d6e3..9bef4bf880 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/SimpleLoadBalancer.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.master.RegionPlan; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.collect.MinMaxPriorityQueue; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue; /** * Makes decisions about the placement and movement of Regions across diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java index 6f0558132a..ebd86be8fc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java @@ -53,8 +53,8 @@ import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.SwapRegi import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.base.Optional; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** *

This is a best effort load balancer. Given a Cost function F(C) => x It will diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java index 891db22e98..920726f4a0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseFileCleanerDelegate.java @@ -20,8 +20,8 @@ package org.apache.hadoop.hbase.master.cleaner; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.hbase.BaseConfigurable; -import com.google.common.base.Predicate; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import java.util.Map; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java index 1e9a4fa4ef..e731c43695 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hbase.master.cleaner; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java index 8b3515a791..52c39ad535 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; import org.apache.hadoop.hbase.util.StealJobQueue; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This Chore, every time it runs, will clear the HFiles in the hfile archive * folder that are deletable for each HFile cleaner in the chain. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java index b72e219a54..6c8bbbac3b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/locking/LockManager.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.master.locking; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HRegionInfo; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java index c1d03267fc..afe72e23e6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java @@ -60,7 +60,7 @@ import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; @InterfaceAudience.Private public class CloneSnapshotProcedure diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java index c3900dd58f..cf55463d99 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java @@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.ModifyRegionUtils; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.Private public class CreateTableProcedure diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java index 575fa809ea..10e6aa06d8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.master.procedure; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java index eda1128a2a..5d871ad382 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.master.procedure; -import com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; import java.io.IOException; import java.net.SocketTimeoutException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java index 8d6568f664..a6969d5a2c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.Snapshot import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Handle the master side of taking a snapshot of an online table, regardless of snapshot type. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java index b6641de004..2e95d3f6ec 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java @@ -29,8 +29,8 @@ import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.locks.ReentrantLock; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java index 1ff05ebdfb..308e2169a1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobFileCache.java @@ -38,7 +38,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.IdLock; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * The cache for mob files. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java index 5fe0002014..d37292cc98 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java @@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * An implementation of {@link MobCompactor} that compacts the mob files in partitions. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/LogMonitoring.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/LogMonitoring.java index d10fdfa2f4..551369e01e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/LogMonitoring.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/LogMonitoring.java @@ -34,7 +34,7 @@ import org.apache.log4j.Appender; import org.apache.log4j.FileAppender; import org.apache.log4j.Logger; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * Utility functions for reading the log4j logs that are diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java index c7616c5241..0162ddba40 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java @@ -26,9 +26,9 @@ import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.base.Charsets; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * A size-bounded repository of alerts, which are kept diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java index ff92704b96..dc961797bb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java @@ -32,8 +32,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Singleton which keeps track of tasks going on in this VM. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceTableAndRegionInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceTableAndRegionInfo.java index d30de6e2c9..4db4cf853e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceTableAndRegionInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/namespace/NamespaceTableAndRegionInfo.java @@ -26,7 +26,7 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.TableName; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.apache.hadoop.hbase.classification.InterfaceAudience; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java index 1d20ba5a83..c96f663578 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare; import org.apache.hadoop.hbase.errorhandling.TimeoutExceptionInjector; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * A globally-barriered distributed procedure. This class encapsulates state and methods for diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java index 8a64cc89fe..0c9793d9aa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; -import com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; /** * This is the master side of a distributed complex procedure execution. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java index baed1f3148..4574e1fedd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.errorhandling.ForeignException; -import com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; /** * Process to kick off and manage a running {@link Subprocedure} on a member. This is the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java index 3092114746..09dad52ae2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java @@ -48,7 +48,7 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.MetaTableLocator; import org.apache.zookeeper.KeeperException; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class MasterFlushTableProcedureManager extends MasterProcedureManager { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/AverageIntervalRateLimiter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/AverageIntervalRateLimiter.java index 9320d7c025..863ba93c85 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/AverageIntervalRateLimiter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/AverageIntervalRateLimiter.java @@ -13,7 +13,7 @@ package org.apache.hadoop.hbase.quotas; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This limiter will refill resources at every TimeUnit/resources interval. For example: For a diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FixedIntervalRateLimiter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FixedIntervalRateLimiter.java index 5eb7e133aa..09f5315568 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FixedIntervalRateLimiter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FixedIntervalRateLimiter.java @@ -13,7 +13,7 @@ package org.apache.hadoop.hbase.quotas; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * With this limiter resources will be refilled only after a fixed interval of time. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java index 8f6a33aa70..3453b7a890 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java @@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.ThrottleReq import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Master Quota Manager. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NamespaceQuotaSnapshotStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NamespaceQuotaSnapshotStore.java index 170879ae46..baa19078e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NamespaceQuotaSnapshotStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NamespaceQuotaSnapshotStore.java @@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota; -import com.google.common.base.Predicate; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * {@link QuotaSnapshotStore} implementation for namespaces. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java index d0e3f90a54..4b5c88ed3f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.quotas; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java index 9600d1797d..8c9e780054 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaObserverChore.java @@ -43,10 +43,10 @@ import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * Reads the currently received Region filesystem-space use reports and acts on those which diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java index 28fbf0d7b8..fe0a983fd0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RateLimiter.java @@ -23,7 +23,7 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Simple rate limiter. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java index 756251a5e2..95cf02f2e3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerRpcQuotaManager.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.security.UserGroupInformation; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Region Server Quota Manager. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java index 86bdf083d0..751fbd2fd6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionServerSpaceQuotaManager.java @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot.SpaceQuotaStatus; import org.apache.hadoop.hbase.regionserver.RegionServerServices; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * A manager for filesystem space quotas in the RegionServer. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java index 46f5a64286..f16db1ef42 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SnapshotQuotaObserverChore.java @@ -58,8 +58,8 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.HFileArchiveUtil; import org.apache.hadoop.util.StringUtils; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * A Master-invoked {@code Chore} that computes the size of each snapshot which was created from diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java index b94e643813..27e4c738a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/TableQuotaSnapshotStore.java @@ -42,8 +42,8 @@ import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota; -import com.google.common.base.Predicate; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * {@link QuotaSnapshotStore} for tables. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java index cff2b27c32..c1f1b1a4bd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AbstractMemStore.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.List; import java.util.NavigableSet; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java index 6c98c1d273..46eb9209fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/AnnotationReadingPriorityFunction.java @@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanReques import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message; import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat; import org.apache.hadoop.hbase.security.User; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkMap.java index a965ade5f2..daaa287bd9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellChunkMap.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.regionserver; import java.nio.ByteBuffer; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java index 0a4ea9eb80..48262a9e15 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CellSet.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Chunk.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Chunk.java index a45d801fa0..8743923b07 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Chunk.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Chunk.java @@ -23,8 +23,8 @@ import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * A chunk of memory out of which allocations are sliced. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java index d5501489be..38d7136b0f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java @@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.HeapMemoryTuneObserver; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * Does the management of memstoreLAB chunk creations. A monotonically incrementing id is associated diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java index e7157d0cfa..feb7d7b618 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java @@ -50,8 +50,8 @@ import org.apache.hadoop.hbase.util.StealJobQueue; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Compact region on request and then run split if appropriate diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java index 6b8948bf90..b016d3e7b5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactedHFilesDischarger.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.regionserver.RegionServerServices; import org.apache.hadoop.hbase.regionserver.Store; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * A chore service that periodically cleans up the compacted files when there are no active readers diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java index 5b9372a826..808ced0847 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.ArrayList; import java.util.List; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java index 2f89ec7b83..16d0a425f7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java index d915f2edc2..d18e701060 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.Random; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java index 915d62fdaf..64a1524807 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFileManager.java @@ -36,9 +36,9 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration; -import com.google.common.collect.ImmutableCollection; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Default implementation of StoreFileManager. Not thread-safe. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java index 36e79291bb..e3ac57175a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java @@ -66,7 +66,8 @@ public class DelimitedKeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundR if (splitPoint != null && delimiter != null) { //find the first occurrence of delimiter in split point - int index = com.google.common.primitives.Bytes.indexOf(splitPoint, delimiter); + int index = + org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.indexOf(splitPoint, delimiter); if (index < 0) { LOG.warn("Delimiter " + Bytes.toString(delimiter) + " not found for split key " + Bytes.toString(splitPoint)); @@ -79,4 +80,4 @@ public class DelimitedKeyPrefixRegionSplitPolicy extends IncreasingToUpperBoundR return splitPoint; } } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index b460d1ac9e..9d9e183293 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Optional; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.io.Closeables; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; import java.io.EOFException; import java.io.FileNotFoundException; @@ -1719,7 +1719,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi this.metricsRegion.close(); } if (this.metricsRegionWrapper != null) { - Closeables.closeQuietly(this.metricsRegionWrapper); + Closeables.close(this.metricsRegionWrapper, true); } status.markComplete("Closed"); LOG.info("Closed " + this); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java index 59a0fe5d3d..10412605f4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java @@ -55,7 +55,7 @@ import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import edu.umd.cs.findbugs.annotations.Nullable; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index f0537e0f15..c8237a79a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -204,9 +204,9 @@ import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.NoNodeException; import org.apache.zookeeper.data.Stat; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import sun.misc.Signal; import sun.misc.SignalHandler; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 9ab52c3644..66f013a164 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -93,12 +93,12 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableCollection; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * A Store holds a column family in a Region. Its a memstore and a set of zero diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java index 2df15f9ae7..9b26173d56 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.util.BloomFilterFactory; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * A Store data file. Stores usually have one or more of these files. They diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java index 93e502a7fc..c87a5451e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemoryManager.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.io.hfile.ResizableBlockCache; import org.apache.hadoop.hbase.io.util.MemorySizeUtil; import org.apache.hadoop.util.ReflectionUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Manages tuning of Heap memory using HeapMemoryTuner. Most part of the heap memory is diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java index a398ce9428..4c939074b7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java index 80b682533e..e307f95fb3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java @@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.HasThread; import org.apache.hadoop.ipc.RemoteException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Runs periodically to determine if the WAL should be rolled. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java index 08af7fe88b..4320da8a23 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactor.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java index 2f2a4cf4e1..08ec60a963 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import static org.apache.hadoop.util.StringUtils.humanReadableInt; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.lang.Thread.UncaughtExceptionHandler; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java index 4fba82d24a..ba53348165 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java @@ -36,8 +36,8 @@ import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * A memstore-local allocation buffer. *

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java index 4c28763e05..b230fa809f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServer.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.metrics.MetricRegistries; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.metrics.Timer; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** *

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.java index c5f0f7bef3..043c3c357b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsTableWrapperAggregateImpl.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsExecutor; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @InterfaceAudience.Private public class MetricsTableWrapperAggregateImpl implements MetricsTableWrapperAggregate, Closeable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java index ffcc834274..4746a9342d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java @@ -18,8 +18,8 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import java.util.LinkedList; import java.util.concurrent.atomic.AtomicLong; @@ -240,7 +240,7 @@ public class MultiVersionConcurrencyControl { @VisibleForTesting public String toString() { - return Objects.toStringHelper(this) + return MoreObjects.toStringHelper(this) .add("readPoint", readPoint) .add("writePoint", writePoint).toString(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java index 7361750ff0..0b8f9832e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.ClassSize; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * A mutable segment in memstore, specifically the active segment. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 8da34ced14..2a517b37fc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; import java.io.FileNotFoundException; import java.io.IOException; @@ -226,7 +226,7 @@ import org.apache.hadoop.hbase.wal.WALSplitter; import org.apache.hadoop.hbase.zookeeper.ZKSplitLog; import org.apache.zookeeper.KeeperException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Implements the regionserver RPC services. @@ -1349,7 +1349,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler, RegionScannerHolder rsh = new RegionScannerHolder(scannerName, s, r, closeCallback, shippedCallback, needCursor); RegionScannerHolder existing = scanners.putIfAbsent(scannerName, rsh); - assert existing == null : "scannerId must be unique within regionserver's whole lifecycle!"; + assert existing == null : "scannerId must be unique within regionserver's whole lifecycle! " + + scannerName; return rsh; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java index 63e18c3fe6..350421646d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Region.java @@ -51,7 +51,7 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Regions store data for a certain region of a table. It stores all columns diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index 0abc9882c1..880374c86a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -80,8 +80,8 @@ import org.apache.hadoop.hbase.util.CoprocessorClassLoader; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALKey; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import com.google.protobuf.Message; import com.google.protobuf.Service; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java index e95932ba91..967dbe6e59 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionMergeRequest.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Handles processing region merges. Put in a queue, owned by HRegionServer. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java index e20b3e21cf..274f4899d6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.util.ReflectionUtils; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * A split policy determines when a region should be split. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java index 34901b7f5c..5ef26cc249 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowProcessor.java @@ -152,4 +152,4 @@ public interface RowProcessor { * @return The {@link Durability} to use */ Durability useDurability(); -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java index 2a66e55d15..35cac03aae 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScanInfo.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Immutable information for scans over a store. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.java index c6395564bd..7a781284fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerIdGenerator.java @@ -18,11 +18,13 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.hash.Hashing; +import org.apache.hadoop.hbase.shaded.com.google.common.hash.Hashing; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.nio.charset.Charset; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes; @@ -41,11 +43,19 @@ public class ScannerIdGenerator { private final AtomicInteger scannerIdGen = new AtomicInteger(0); public ScannerIdGenerator(ServerName serverName) { - this.serverNameHash = (long)Hashing.murmur3_32().hashString(serverName.toString()).asInt() << 32; + long hash = Hashing.murmur3_32().hashString(serverName.toString(), + java.nio.charset.StandardCharsets.UTF_8).asInt(); + this.serverNameHash = hash << 32; } public long generateNewScannerId() { return (scannerIdGen.incrementAndGet() & 0x00000000FFFFFFFFL) | serverNameHash; } -} + public static void main(final String [] args) { + ScannerIdGenerator sig = new ScannerIdGenerator(ServerName.valueOf("a.example.org,1234,5678")); + for (int i = 0; i < 10; i++) { + System.out.println(sig.generateNewScannerId()); + } + } +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java index 8f43fa8db3..a264567b9f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Segment.java @@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This is an abstraction of a segment maintained in a memstore, e.g., the active diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java index 1a8b89d75c..958b0bd81e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SegmentFactory.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java index 874ca44e79..61f5b67871 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ServerNonceManager.java @@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.NonceKey; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Implementation of nonce manager that stores nonces in a hash map and cleans them up after diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java index a2f40b1a05..3ac5d04318 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.ExceptionUtil; import org.apache.hadoop.hbase.util.FSUtils; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * This worker is spawned in every regionserver, including master. The Worker waits for log diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java index bd59c537fd..3a9a7777f7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Handles processing region splits. Put in a queue, owned by HRegionServer. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java index 961e3388fd..1c1a14389b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileComparators.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Ordering; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering; import java.util.Comparator; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java index eb760edd80..1b6523dbc8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileManager.java @@ -24,7 +24,7 @@ import java.util.Comparator; import java.util.Iterator; import java.util.List; -import com.google.common.collect.ImmutableCollection; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java index ee7d132aed..15bf12139f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.DataInput; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java index 0d94378d7a..a8773c0c3d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.net.InetSocketAddress; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java index 11301d865b..d353c4de32 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java @@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher; import org.apache.hadoop.hbase.util.CollectionUtils; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Scanner scans both the memstore and the Store. Coalesce KeyValue stream into List<KeyValue> diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java index 1e78ab2cf5..2e17e183d4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreEngine.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.security.User; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * The storage engine that implements the stripe-based store/compaction scheme. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java index 18a6eec99f..153677471c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java @@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ConcatenatedLists; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; -import com.google.common.collect.ImmutableCollection; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableCollection; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; /** * Stripe implementation of StoreFileManager. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java index 3f9688dd41..bb352dcffc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy; import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Stripe implementation of StoreFlusher. Flushes files either into L0 file w/o metadata, or diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java index 716a820e58..340b780123 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.regionserver.StoreFileReader; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * This class holds all logical details necessary to run a compaction. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java index 463ed86b95..1b8704ca07 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java @@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; -import com.google.common.io.Closeables; +import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; /** * A compactor is a compaction algorithm associated a given policy. Base class also contains diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java index de461e9b4f..20c54135e7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DateTieredCompactionPolicy.java @@ -39,11 +39,11 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.ReflectionUtils; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import com.google.common.collect.PeekingIterator; -import com.google.common.math.LongMath; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.PeekingIterator; +import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath; /** * HBASE-15181 This is a simple implementation of date-based tiered compaction similar to diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java index 020b526b97..cd35df1d41 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/DefaultCompactor.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController; import org.apache.hadoop.hbase.security.User; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Compact passed set of files. Create an instance and then call diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java index 33ba9a5d1d..0a387c1534 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/ExponentialCompactionWindowFactory.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver.compactions; -import com.google.common.math.LongMath; +import org.apache.hadoop.hbase.shaded.com.google.common.math.LongMath; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java index 232e5528a4..a1d4c561f9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java @@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.regionserver.StoreConfigInformation; import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.regionserver.StoreUtils; -import com.google.common.base.Preconditions; -import com.google.common.base.Predicate; -import com.google.common.collect.Collections2; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Collections2; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * An abstract compaction policy that select files on seq id order. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java index 0b66d3df97..bc12673c3f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactionPolicy.java @@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.util.ConcatenatedLists; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; /** * Stripe store implementation of compaction policy. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java index 11dd51fdb9..8cdcd40192 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/LegacyScanQueryMatcher.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver.querymatcher; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.util.Arrays; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java index f32d0edc66..a79d4c83b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AbstractFSWAL.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver.wal; -import static com.google.common.base.Preconditions.*; +import static org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions.*; import static org.apache.hadoop.hbase.wal.AbstractFSWALProvider.WAL_FILE_NAME_DELIMITER; import java.io.IOException; @@ -74,7 +74,7 @@ import org.apache.htrace.Span; import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import com.lmax.disruptor.RingBuffer; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java index 69ca1c523c..7e38527aa8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncFSWAL.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver.wal; import static org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputHelper.shouldRetryCreate; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import com.lmax.disruptor.RingBuffer; import com.lmax.disruptor.Sequence; import com.lmax.disruptor.Sequencer; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java index e1f7b8f5ef..585df88361 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import io.netty.channel.EventLoop; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java index 4032cde588..bb72520760 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.io.util.Dictionary; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.wal.WAL; import org.apache.hadoop.hbase.wal.WALFactory; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java index 77ac1d1ab0..0aa0eb8794 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import com.lmax.disruptor.BlockingWaitStrategy; import com.lmax.disruptor.EventHandler; import com.lmax.disruptor.ExceptionHandler; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java index 3b8525c910..8423f1ff71 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.Collections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java index 69a31cdd6f..795ca36c0d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java index b065a59345..8797dfb6f3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceIdAccounting.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver.wal; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.util.ArrayList; import java.util.Collections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java index 0f32a1d9bf..7ac455060b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java @@ -39,7 +39,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java index cf141c140a..0e619ea4d0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseReplicationEndpoint.java @@ -25,8 +25,8 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience; -import com.google.common.collect.Lists; -import com.google.common.util.concurrent.AbstractService; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AbstractService; import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener; /** @@ -111,5 +111,4 @@ public abstract class BaseReplicationEndpoint extends AbstractService public boolean canReplicateToSameCluster() { return false; } - } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java index 86fc1fa762..e832d6bd56 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BulkLoadCellFilter.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescr import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; public class BulkLoadCellFilter { private static final Log LOG = LogFactory.getLog(BulkLoadCellFilter.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java index 23df804ac8..1bc18a9dfe 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/HBaseReplicationEndpoint.java @@ -223,4 +223,4 @@ public abstract class HBaseReplicationEndpoint extends BaseReplicationEndpoint } } } -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java index 2673cbd7d8..8a277ffc47 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/NamespaceTableCfWALEntryFilter.java @@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; /** * Filter a WAL Entry by namespaces and table-cfs config in the peer. It first filter entry diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java index 69db31c15e..6bf696b2c0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ReplicationEndpoint.java @@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.TableDescriptors; import org.apache.hadoop.hbase.wal.WAL.Entry; import org.apache.hadoop.hbase.replication.regionserver.MetricsSource; -import com.google.common.util.concurrent.Service; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service; /** * ReplicationEndpoint is a plugin which implements replication @@ -176,5 +176,4 @@ public interface ReplicationEndpoint extends Service, ReplicationPeerConfigListe * parameters can be obtained. */ boolean replicate(ReplicateContext replicateContext); - } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java index b084a04cd0..69421ef587 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/ScopeWALEntryFilter.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; /** * Keeps KVs that are scoped other than local diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java index 4c862446c0..63c44f40e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationHFileCleaner.java @@ -10,11 +10,11 @@ */ package org.apache.hadoop.hbase.replication.master; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Predicate; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import java.io.IOException; import java.util.Collections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java index 074c1137f5..73249769b3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.replication.master; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -38,10 +38,10 @@ import java.util.Collections; import java.util.List; import java.util.Set; -import com.google.common.base.Predicate; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.zookeeper.KeeperException; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java index 2bedbfd17b..40a1fc93c1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java @@ -59,7 +59,7 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.zookeeper.KeeperException; -import com.google.common.util.concurrent.AtomicLongMap; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLongMap; /** * Provides information about the existing states of replication, replication peers and queues. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java index a40c22d70f..6c795378fa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HBaseInterClusterReplicationEndpoint.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.replication.regionserver; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.net.ConnectException; @@ -388,13 +388,6 @@ public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoi notifyStopped(); } - // is this needed? Nobody else will call doStop() otherwise - @Override - public State stopAndWait() { - doStop(); - return super.stopAndWait(); - } - @VisibleForTesting protected Replicator createReplicator(List entries, int ordinal) { return new Replicator(entries, ordinal); @@ -453,4 +446,4 @@ public class HBaseInterClusterReplicationEndpoint extends HBaseReplicationEndpoi } } } -} +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java index c091b44e74..b3556c6318 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/HFileReplicator.java @@ -10,7 +10,7 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java index 3e0de45f72..9f600dad77 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/RegionReplicaReplicationEndpoint.java @@ -75,9 +75,9 @@ import org.apache.hadoop.hbase.wal.WALSplitter.RegionEntryBuffer; import org.apache.hadoop.hbase.wal.WALSplitter.SinkWriter; import org.apache.hadoop.util.StringUtils; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * A {@link org.apache.hadoop.hbase.replication.ReplicationEndpoint} endpoint diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java index 6ff666eb35..b8b6fbb874 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java @@ -61,7 +61,7 @@ import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.zookeeper.ZKClusterId; import org.apache.zookeeper.KeeperException; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * Gateway to Replication. Used by {@link org.apache.hadoop.hbase.regionserver.HRegionServer}. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java index b038db8f1d..eb882f3a49 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkManager.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import java.io.IOException; import java.util.Collections; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java index 3d4353fbfc..120c697ea6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.replication.regionserver; -import com.google.common.collect.Lists; -import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.Service; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service; import java.io.IOException; import java.util.ArrayList; @@ -33,6 +33,7 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicLong; import org.apache.commons.lang.StringUtils; @@ -244,9 +245,12 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf this.sourceRunning = true; try { // start the endpoint, connect to the cluster - Service.State state = replicationEndpoint.start().get(); - if (state != Service.State.RUNNING) { - LOG.warn("ReplicationEndpoint was not started. Exiting"); + Service service = replicationEndpoint.startAsync(); + final int waitTime = 10; + service.awaitRunning(waitTime, TimeUnit.SECONDS); + if (!service.isRunning()) { + LOG.warn("ReplicationEndpoint was not started after waiting " + waitTime + + " + seconds. Exiting"); uninitialize(); return; } @@ -381,7 +385,13 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf metrics.clear(); if (replicationEndpoint.state() == Service.State.STARTING || replicationEndpoint.state() == Service.State.RUNNING) { - replicationEndpoint.stopAndWait(); + replicationEndpoint.stopAsync(); + final int waitTime = 10; + try { + replicationEndpoint.awaitTerminated(waitTime, TimeUnit.SECONDS); + } catch (TimeoutException e) { + LOG.warn("Failed termination after " + waitTime + " seconds."); + } } } @@ -453,22 +463,22 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf worker.entryReader.interrupt(); worker.interrupt(); } - ListenableFuture future = null; + Service service = null; if (this.replicationEndpoint != null) { - future = this.replicationEndpoint.stop(); + service = this.replicationEndpoint.stopAsync(); } if (join) { for (ReplicationSourceShipper worker : workers) { Threads.shutdown(worker, this.sleepForRetries); LOG.info("ReplicationSourceWorker " + worker.getName() + " terminated"); } - if (future != null) { + if (service != null) { try { - future.get(sleepForRetries * maxRetriesMultiplier, TimeUnit.MILLISECONDS); - } catch (Exception e) { + service.awaitTerminated(sleepForRetries * maxRetriesMultiplier, TimeUnit.MILLISECONDS); + } catch (TimeoutException te) { LOG.warn("Got exception while waiting for endpoint to shutdown for replication source :" + this.peerClusterZnode, - e); + te); } } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java index 5b5e207e8b..a809832050 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.replication.regionserver; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java index 3e1e50be6c..069476359e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceShipper.java @@ -42,9 +42,9 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheLoader; +import org.apache.hadoop.hbase.shaded.com.google.common.cache.LoadingCache; /** * This thread reads entries from a queue and ships them. Entries are placed onto the queue by diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java index b719eba2f9..b61f90985c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.security.access; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.ByteArrayInputStream; import java.io.DataInput; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java index aa0c09458e..da3462258d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java @@ -123,13 +123,13 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.ListMultimap; -import com.google.common.collect.Lists; -import com.google.common.collect.MapMaker; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import com.google.protobuf.Message; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java index bf05dc10b8..3ace9683bb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AuthResult.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; /** * Represents the result of an authorization check for logging and error diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java index 84cda91df9..981cc2b023 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java @@ -20,10 +20,10 @@ package org.apache.hadoop.hbase.security.access; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.Closeable; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index 8d0a46f97b..7e4ca14a4a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.zookeeper.ZKClusterId; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.io.Text; @@ -89,8 +88,8 @@ public class TokenUtil { */ public static AuthenticationProtos.Token toToken(Token token) { AuthenticationProtos.Token.Builder builder = AuthenticationProtos.Token.newBuilder(); - builder.setIdentifier(ByteStringer.wrap(token.getIdentifier())); - builder.setPassword(ByteStringer.wrap(token.getPassword())); + builder.setIdentifier(ByteString.copyFrom(token.getIdentifier())); + builder.setPassword(ByteString.copyFrom(token.getPassword())); if (token.getService() != null) { builder.setService(ByteString.copyFromUtf8(token.getService().toString())); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java index f9d716b4a8..e5f7a6590d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.security.token; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.util.List; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java index 2b9a56ee42..71b582bee2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java @@ -86,7 +86,7 @@ public class ExpressionParser { // This could be costly. but do we have any alternative? // If we don't do this way then we may have to handle while checking the authorizations. // Better to do it here. - byte[] array = com.google.common.primitives.Bytes.toArray(list); + byte[] array = org.apache.hadoop.hbase.shaded.com.google.common.primitives.Bytes.toArray(list); String leafExp = Bytes.toString(array).trim(); if (leafExp.isEmpty()) { throw new ParseException("Error parsing expression " + expS + " at column : " + index); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java index 476921b370..529ae0b3f3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityController.java @@ -104,13 +104,12 @@ import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.AccessController; -import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.StringUtils; -import com.google.common.collect.Lists; -import com.google.common.collect.MapMaker; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; import com.google.protobuf.ByteString; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; @@ -942,7 +941,7 @@ public class VisibilityController implements MasterObserver, RegionObserver, response.setUser(request.getUser()); if (labels != null) { for (String label : labels) { - response.addAuth(ByteStringer.wrap(Bytes.toBytes(label))); + response.addAuth(ByteString.copyFrom(Bytes.toBytes(label))); } } } @@ -1030,7 +1029,7 @@ public class VisibilityController implements MasterObserver, RegionObserver, } if (labels != null && !labels.isEmpty()) { for (String label : labels) { - response.addLabel(ByteStringer.wrap(Bytes.toBytes(label))); + response.addLabel(ByteString.copyFrom(Bytes.toBytes(label))); } } } @@ -1122,4 +1121,4 @@ public class VisibilityController implements MasterObserver, RegionObserver, ByteString.copyFromUtf8(StringUtils.stringifyException(t))); return parameterBuilder.build(); } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java index c77b7761ac..51655a1d74 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java @@ -21,6 +21,9 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.UUID; +import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -36,7 +39,8 @@ import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.replication.WALEntryFilter; import org.apache.hadoop.hbase.wal.WAL.Entry; -import com.google.common.util.concurrent.ListenableFuture; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ListenableFuture; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.Service; @InterfaceAudience.Private public class VisibilityReplicationEndpoint implements ReplicationEndpoint { @@ -134,33 +138,52 @@ public class VisibilityReplicationEndpoint implements ReplicationEndpoint { } @Override + public Service startAsync() { + return this.delegator.startAsync(); + } + + @Override public boolean isRunning() { return delegator.isRunning(); } @Override - public ListenableFuture start() { - return delegator.start(); + public State state() { + return delegator.state(); } @Override - public State startAndWait() { - return delegator.startAndWait(); + public Service stopAsync() { + return this.delegator.stopAsync(); } @Override - public State state() { - return delegator.state(); + public void awaitRunning() { + this.delegator.awaitRunning(); } @Override - public ListenableFuture stop() { - return delegator.stop(); + public void awaitRunning(long l, TimeUnit timeUnit) throws TimeoutException { + this.delegator.awaitRunning(l, timeUnit); } @Override - public State stopAndWait() { - return delegator.stopAndWait(); + public void awaitTerminated() { + this.delegator.awaitTerminated(); } -} + @Override + public void awaitTerminated(long l, TimeUnit timeUnit) throws TimeoutException { + this.delegator.awaitTerminated(l, timeUnit); + } + + @Override + public Throwable failureCause() { + return this.delegator.failureCause(); + } + + @Override + public void addListener(Listener listener, Executor executor) { + this.delegator.addListener(listener, executor); + } +} \ No newline at end of file diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java index 4441c086e9..f75e2f06e8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java @@ -59,8 +59,8 @@ import org.apache.hadoop.hbase.security.visibility.expression.ExpressionNode; import org.apache.hadoop.hbase.security.visibility.expression.LeafExpressionNode; import org.apache.hadoop.hbase.security.visibility.expression.NonLeafExpressionNode; import org.apache.hadoop.hbase.security.visibility.expression.Operator; +import com.google.protobuf.ByteString; import org.apache.hadoop.hbase.util.ByteRange; -import org.apache.hadoop.hbase.util.ByteStringer; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.util.ReflectionUtils; @@ -93,7 +93,7 @@ public class VisibilityUtils { VisibilityLabelsRequest.Builder visReqBuilder = VisibilityLabelsRequest.newBuilder(); for (Entry entry : existingLabels.entrySet()) { VisibilityLabel.Builder visLabBuilder = VisibilityLabel.newBuilder(); - visLabBuilder.setLabel(ByteStringer.wrap(Bytes.toBytes(entry.getKey()))); + visLabBuilder.setLabel(ByteString.copyFrom(Bytes.toBytes(entry.getKey()))); visLabBuilder.setOrdinal(entry.getValue()); visReqBuilder.addVisLabel(visLabBuilder.build()); } @@ -109,7 +109,7 @@ public class VisibilityUtils { MultiUserAuthorizations.Builder builder = MultiUserAuthorizations.newBuilder(); for (Entry> entry : userAuths.entrySet()) { UserAuthorizations.Builder userAuthsBuilder = UserAuthorizations.newBuilder(); - userAuthsBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(entry.getKey()))); + userAuthsBuilder.setUser(ByteString.copyFrom(Bytes.toBytes(entry.getKey()))); for (Integer label : entry.getValue()) { userAuthsBuilder.addAuth(label); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java index 683c404c69..9875ac0b0f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java @@ -34,7 +34,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ThreadPoolExecutor; -import com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java index c471337db3..55afb44cb7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.security.PrivilegedExceptionAction; import java.util.Collections; -import com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java index 1e1aa9a96c..7885be059e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.tool; import static org.apache.hadoop.hbase.HConstants.DEFAULT_ZOOKEEPER_ZNODE_PARENT; import static org.apache.hadoop.hbase.HConstants.ZOOKEEPER_ZNODE_PARENT; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.Closeable; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java index 47cb39475a..39364ac03c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.util; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.util.StringUtils; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java index 9013bab8b5..8e56237773 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java @@ -37,7 +37,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** * Utilities for interacting with and monitoring DirectByteBuffer allocations. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java index de49d389ba..5817a77b78 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java @@ -29,7 +29,7 @@ import java.util.Map; import java.util.Set; import java.util.Collection; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java index dcd3144d2a..eb6b7660bb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java @@ -27,8 +27,8 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.primitives.Ints; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints; import edu.umd.cs.findbugs.annotations.Nullable; import org.apache.commons.lang.NotImplementedException; import org.apache.commons.logging.Log; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java index 284b786e00..6a6d689f61 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java @@ -18,11 +18,11 @@ */ package org.apache.hadoop.hbase.util; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Throwables; -import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import com.google.common.primitives.Ints; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints; import edu.umd.cs.findbugs.annotations.CheckForNull; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index f1e42a683d..d4f8e4585e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -17,13 +17,13 @@ */ package org.apache.hadoop.hbase.util; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Multimap; -import com.google.common.collect.Ordering; -import com.google.common.collect.TreeMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Ordering; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap; import com.google.protobuf.ServiceException; import java.io.Closeable; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java index e5dbae2084..9328a94ceb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdLock.java @@ -25,7 +25,7 @@ import java.util.concurrent.ConcurrentMap; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Allows multiple concurrent clients to lock on a numeric id with a minimal diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java index 2a83029313..aa48fc77b4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java @@ -23,7 +23,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Allows multiple concurrent clients to lock on a numeric id with ReentrantReadWriteLock. The diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java index 9288a45cca..7c55a1ce7e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JvmPauseMonitor.java @@ -22,6 +22,7 @@ import java.lang.management.ManagementFactory; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -29,12 +30,12 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource; import org.apache.hadoop.conf.Configuration; -import com.google.common.base.Joiner; -import com.google.common.base.Preconditions; -import com.google.common.base.Stopwatch; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * Class which sets up a simple thread which runs in a loop sleeping @@ -145,7 +146,7 @@ public class JvmPauseMonitor { private class Monitor implements Runnable { @Override public void run() { - Stopwatch sw = new Stopwatch(); + Stopwatch sw = Stopwatch.createUnstarted(); Map gcTimesBeforeSleep = getGcTimes(); while (shouldRun) { sw.reset().start(); @@ -155,7 +156,7 @@ public class JvmPauseMonitor { return; } - long extraSleepTime = sw.elapsedMillis() - SLEEP_INTERVAL_MS; + long extraSleepTime = sw.elapsed(TimeUnit.MILLISECONDS) - SLEEP_INTERVAL_MS; Map gcTimesAfterSleep = getGcTimes(); if (extraSleepTime > infoThresholdMs) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java index 824963028e..99769b7d88 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSizeCalculator.java @@ -25,7 +25,7 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java index e07966e475..48c4321474 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java @@ -31,9 +31,9 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Multimap; -import com.google.common.collect.TreeMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.TreeMultimap; /** * This is a generic region split calculator. It requires Ranges that provide diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java index 87ff010668..4ca272418c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java @@ -62,10 +62,10 @@ import org.apache.hadoop.hbase.client.RegionLocator; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * The {@link RegionSplitter} class provides several utilities to help in the diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java index 28b7fda285..4a385ee259 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AbstractFSWALProvider.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hbase.util.LeaseNotRecoveredException; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * Base class of a WAL Provider that returns a single thread safe WAL that writes to Hadoop FS. By diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java index 786f58af59..51a5459b55 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/AsyncFSWALProvider.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.wal; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import io.netty.channel.EventLoop; import io.netty.channel.EventLoopGroup; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java index 2ae20cf680..64440dff70 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.wal; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.Closeable; import java.io.IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java index 114715fabe..90ce7e5136 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.wal; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java index 1d84c4b7ea..751ceba793 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.wal; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java index 517a0cd115..da828ded1e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.wal; -import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.EOFException; import java.io.FileNotFoundException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java index c6f22ffede..8432a1b10f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java @@ -41,7 +41,7 @@ import org.apache.zookeeper.server.NIOServerCnxnFactory; import org.apache.zookeeper.server.ZooKeeperServer; import org.apache.zookeeper.server.persistence.FileTxnLog; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; /** * TODO: Most of the code in this class is ripped from ZooKeeper tests. Instead diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java index 6981c8af95..2014b5b23a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/GenericTestUtils.java @@ -44,9 +44,9 @@ import org.junit.Assert; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.common.base.Joiner; -import com.google.common.base.Supplier; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Supplier; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * Test provides some very generic helpers which might be used across the tests diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 5c8b29b894..33069b684f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -1896,8 +1896,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { * @param tableName * @param startKey * @param stopKey - * @param callingMethod - * @param conf * @param isReadOnly * @param families * @return A region on which you must call @@ -3300,7 +3298,6 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility { /** * Uses directly the assignment manager to assign the region. * and waits until the specified region has completed assignment. - * @param tableName the table name * @throws IOException * @throw InterruptedException * @return true if the region is assigned false otherwise. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java index 2c5cb65aae..e4dfefb8ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -100,8 +100,8 @@ import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; import org.apache.htrace.impl.ProbabilitySampler; -import com.google.common.base.Objects; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import com.codahale.metrics.Histogram; import com.codahale.metrics.UniformReservoir; @@ -333,7 +333,7 @@ public class PerformanceEvaluation extends Configured implements Tool { || (!isReadCmd && desc != null && desc.getRegionReplication() != opts.replicas)) { needsDelete = true; // wait, why did it delete my table?!? - LOG.debug(Objects.toStringHelper("needsDelete") + LOG.debug(MoreObjects.toStringHelper("needsDelete") .add("needsDelete", needsDelete) .add("isReadCmd", isReadCmd) .add("exists", exists) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java index 24e9590d9c..e669f14be1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java @@ -47,7 +47,7 @@ import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.ToolRunner; -import com.google.common.base.Stopwatch; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; /** * A simple performance evaluation tool for single client and MR scans @@ -102,8 +102,8 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { FileSystem fs = filename.getFileSystem(getConf()); // read the file from start to finish - Stopwatch fileOpenTimer = new Stopwatch(); - Stopwatch streamTimer = new Stopwatch(); + Stopwatch fileOpenTimer = Stopwatch.createUnstarted(); + Stopwatch streamTimer = Stopwatch.createUnstarted(); fileOpenTimer.start(); FSDataInputStream in = fs.open(filename); @@ -120,11 +120,12 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { } streamTimer.stop(); - double throughput = (double)totalBytes / streamTimer.elapsedTime(TimeUnit.SECONDS); + double throughput = (double)totalBytes / streamTimer.elapsed(TimeUnit.SECONDS); System.out.println("HDFS streaming: "); - System.out.println("total time to open: " + fileOpenTimer.elapsedMillis() + " ms"); - System.out.println("total time to read: " + streamTimer.elapsedMillis() + " ms"); + System.out.println("total time to open: " + + fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to read: " + streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println("total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); System.out.println("throghput : " + StringUtils.humanReadableInt((long)throughput) + "B/s"); @@ -143,9 +144,9 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { } public void testScan() throws IOException { - Stopwatch tableOpenTimer = new Stopwatch(); - Stopwatch scanOpenTimer = new Stopwatch(); - Stopwatch scanTimer = new Stopwatch(); + Stopwatch tableOpenTimer = Stopwatch.createUnstarted(); + Stopwatch scanOpenTimer = Stopwatch.createUnstarted(); + Stopwatch scanTimer = Stopwatch.createUnstarted(); tableOpenTimer.start(); Connection connection = ConnectionFactory.createConnection(getConf()); @@ -176,14 +177,17 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { ScanMetrics metrics = scan.getScanMetrics(); long totalBytes = metrics.countOfBytesInResults.get(); - double throughput = (double)totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputRows = (double)numRows / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputCells = (double)numCells / scanTimer.elapsedTime(TimeUnit.SECONDS); + double throughput = (double)totalBytes / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputRows = (double)numRows / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputCells = (double)numCells / scanTimer.elapsed(TimeUnit.SECONDS); System.out.println("HBase scan: "); - System.out.println("total time to open table: " + tableOpenTimer.elapsedMillis() + " ms"); - System.out.println("total time to open scanner: " + scanOpenTimer.elapsedMillis() + " ms"); - System.out.println("total time to scan: " + scanTimer.elapsedMillis() + " ms"); + System.out.println("total time to open table: " + + tableOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to open scanner: " + + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to scan: " + + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println("Scan metrics:\n" + metrics.getMetricsMap()); @@ -198,9 +202,9 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { public void testSnapshotScan() throws IOException { - Stopwatch snapshotRestoreTimer = new Stopwatch(); - Stopwatch scanOpenTimer = new Stopwatch(); - Stopwatch scanTimer = new Stopwatch(); + Stopwatch snapshotRestoreTimer = Stopwatch.createUnstarted(); + Stopwatch scanOpenTimer = Stopwatch.createUnstarted(); + Stopwatch scanTimer = Stopwatch.createUnstarted(); Path restoreDir = new Path(this.restoreDir); @@ -230,14 +234,17 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { ScanMetrics metrics = scanner.getScanMetrics(); long totalBytes = metrics.countOfBytesInResults.get(); - double throughput = (double)totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputRows = (double)numRows / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputCells = (double)numCells / scanTimer.elapsedTime(TimeUnit.SECONDS); + double throughput = (double)totalBytes / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputRows = (double)numRows / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputCells = (double)numCells / scanTimer.elapsed(TimeUnit.SECONDS); System.out.println("HBase scan snapshot: "); - System.out.println("total time to restore snapshot: " + snapshotRestoreTimer.elapsedMillis() + " ms"); - System.out.println("total time to open scanner: " + scanOpenTimer.elapsedMillis() + " ms"); - System.out.println("total time to scan: " + scanTimer.elapsedMillis() + " ms"); + System.out.println("total time to restore snapshot: " + + snapshotRestoreTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to open scanner: " + + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to scan: " + + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println("Scan metrics:\n" + metrics.getMetricsMap()); @@ -267,8 +274,8 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { } public void testScanMapReduce() throws IOException, InterruptedException, ClassNotFoundException { - Stopwatch scanOpenTimer = new Stopwatch(); - Stopwatch scanTimer = new Stopwatch(); + Stopwatch scanOpenTimer = Stopwatch.createUnstarted(); + Stopwatch scanTimer = Stopwatch.createUnstarted(); Scan scan = getScan(); @@ -302,13 +309,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { long numCells = counters.findCounter(ScanCounter.NUM_CELLS).getValue(); long totalBytes = counters.findCounter(HBASE_COUNTER_GROUP_NAME, "BYTES_IN_RESULTS").getValue(); - double throughput = (double)totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputRows = (double)numRows / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputCells = (double)numCells / scanTimer.elapsedTime(TimeUnit.SECONDS); + double throughput = (double)totalBytes / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputRows = (double)numRows / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputCells = (double)numCells / scanTimer.elapsed(TimeUnit.SECONDS); System.out.println("HBase scan mapreduce: "); - System.out.println("total time to open scanner: " + scanOpenTimer.elapsedMillis() + " ms"); - System.out.println("total time to scan: " + scanTimer.elapsedMillis() + " ms"); + System.out.println("total time to open scanner: " + + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println("total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); @@ -320,8 +328,8 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { } public void testSnapshotScanMapReduce() throws IOException, InterruptedException, ClassNotFoundException { - Stopwatch scanOpenTimer = new Stopwatch(); - Stopwatch scanTimer = new Stopwatch(); + Stopwatch scanOpenTimer = Stopwatch.createUnstarted(); + Stopwatch scanTimer = Stopwatch.createUnstarted(); Scan scan = getScan(); @@ -357,13 +365,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool { long numCells = counters.findCounter(ScanCounter.NUM_CELLS).getValue(); long totalBytes = counters.findCounter(HBASE_COUNTER_GROUP_NAME, "BYTES_IN_RESULTS").getValue(); - double throughput = (double)totalBytes / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputRows = (double)numRows / scanTimer.elapsedTime(TimeUnit.SECONDS); - double throughputCells = (double)numCells / scanTimer.elapsedTime(TimeUnit.SECONDS); + double throughput = (double)totalBytes / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputRows = (double)numRows / scanTimer.elapsed(TimeUnit.SECONDS); + double throughputCells = (double)numCells / scanTimer.elapsed(TimeUnit.SECONDS); System.out.println("HBase scan mapreduce: "); - System.out.println("total time to open scanner: " + scanOpenTimer.elapsedMillis() + " ms"); - System.out.println("total time to scan: " + scanTimer.elapsedMillis() + " ms"); + System.out.println("total time to open scanner: " + + scanOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); + System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms"); System.out.println("total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")"); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java index b97f9c1562..9e845ad092 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java @@ -57,7 +57,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java index 7640138108..a756af4c22 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIOFencing.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.Collection; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java index f921352b09..6e9454f108 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessor.java @@ -64,7 +64,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java index 70fa8f3b90..e319a95450 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestNamespace.java @@ -48,7 +48,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.junit.rules.TestName; @Category({MiscTests.class, MediumTests.class}) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java index ebd9f94cf8..b787eb5d86 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java @@ -21,8 +21,8 @@ package org.apache.hadoop.hbase; import static org.junit.Assert.*; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.testclassification.MediumTests; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBoundaryTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBoundaryTests.java index 280314becb..40a01b9a1a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBoundaryTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBoundaryTests.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.testclassification.LargeTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestBackupBoundaryTests extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDelete.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDelete.java index 38724fb783..ebfc735513 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDelete.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDelete.java @@ -35,7 +35,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestBackupDelete extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteRestore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteRestore.java index 09218710e9..208e0818ff 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteRestore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteRestore.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(MediumTests.class) public class TestBackupDeleteRestore extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java index ae295498fb..59309ee1df 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java @@ -46,7 +46,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * This class is only a base for other integration-level backup tests. Do not add tests here. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDescribe.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDescribe.java index 38aa30dc20..0672325865 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDescribe.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupDescribe.java @@ -36,7 +36,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestBackupDescribe extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java index 1cb7fec076..0944ea2cc7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java @@ -40,7 +40,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Create multiple backups for two tables: table1, table2 then perform 1 delete diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java index 740c396aee..4e922a2eb2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupShowHistory.java @@ -34,7 +34,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestBackupShowHistory extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupStatusProgress.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupStatusProgress.java index a904d9b6f1..73d8d9fd30 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupStatusProgress.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestBackupStatusProgress.java @@ -33,7 +33,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestBackupStatusProgress extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java index 61bad6891a..48a553f5c7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestFullRestore.java @@ -28,7 +28,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestFullRestore extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java index 77bed436fb..b6fb880cb2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java @@ -47,7 +47,7 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) @RunWith(Parameterized.class) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupDeleteTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupDeleteTable.java index eae25b5dac..747c1ddb16 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupDeleteTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupDeleteTable.java @@ -38,7 +38,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * 1. Create table t1, t2 diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java index c10ec40660..769785f283 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java @@ -46,7 +46,7 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * 1. Create table t1 diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithFailures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithFailures.java index b30bf91e95..84a596eb5c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithFailures.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithFailures.java @@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) @RunWith(Parameterized.class) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java index 63f90fbf19..36a9ee297b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRemoteBackup.java @@ -35,7 +35,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestRemoteBackup extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java index 9beb47b01a..9c476419de 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java @@ -35,7 +35,7 @@ import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestRepairAfterFailedDelete extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/master/TestBackupLogCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/master/TestBackupLogCleaner.java index 229597b57a..5f72f458b9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/master/TestBackupLogCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/master/TestBackupLogCleaner.java @@ -49,8 +49,8 @@ import org.apache.hadoop.hbase.wal.AbstractFSWALProvider; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestBackupLogCleaner extends TestBackupBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleRawScanResultConsumer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleRawScanResultConsumer.java index 026a21f89e..9520a475bb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleRawScanResultConsumer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleRawScanResultConsumer.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import java.io.IOException; import java.util.ArrayDeque; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleScanResultConsumer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleScanResultConsumer.java index 168129d53d..ea0ed4289d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleScanResultConsumer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/SimpleScanResultConsumer.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.client; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import java.util.ArrayList; import java.util.List; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java index e1277fa7a6..a0cf4d2e6f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestEnableTable.java @@ -53,9 +53,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.base.Predicate; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Predicate; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({ MasterTests.class, MediumTests.class }) public class TestEnableTable { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java index e99ee07374..037a538060 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestHCM.java @@ -86,7 +86,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * This class is for testing HBaseConnectionManager features diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java index 1e6b0fa1e5..4ef354998e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSizeFailures.java @@ -39,7 +39,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @Category(LargeTests.class) public class TestSizeFailures { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java index f2f3b26ed6..7653844761 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java @@ -54,7 +54,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java index 8412e77185..ba07755c13 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestTableFavoredNodes.java @@ -65,8 +65,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; @Category({ClientTests.class, MediumTests.class}) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java index 26113a6c28..16290320c6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java @@ -38,8 +38,8 @@ import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.io.CountingInputStream; -import com.google.common.io.CountingOutputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingInputStream; +import org.apache.hadoop.hbase.shaded.com.google.common.io.CountingOutputStream; @Category({MiscTests.class, SmallTests.class}) public class TestCellMessageCodec { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java index c508b02823..f0a4b6833e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java @@ -24,7 +24,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import java.io.IOException; import java.util.List; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java index e7b71ecadf..f9977513f8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorMetrics.java @@ -71,7 +71,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import com.google.protobuf.RpcCallback; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java index 58af88ee6b..9eb1f36cd4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverForAddingMutationsFromCoprocessors.java @@ -54,7 +54,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(MediumTests.class) public class TestRegionObserverForAddingMutationsFromCoprocessors { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/favored/TestFavoredNodeAssignmentHelper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/favored/TestFavoredNodeAssignmentHelper.java index d624c93392..8d0fe19d4e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/favored/TestFavoredNodeAssignmentHelper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/favored/TestFavoredNodeAssignmentHelper.java @@ -50,8 +50,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Category({MasterTests.class, SmallTests.class}) public class TestFavoredNodeAssignmentHelper { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index a403c24718..17cddae1f2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -61,7 +61,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index cf8a0a0ca6..bcb6617fc6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.filter; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java index 989a93bc51..36cf068395 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java @@ -49,7 +49,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java index 3c11efeb76..686c2a29c0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java @@ -54,7 +54,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java index 8d947665c5..b925ff64d0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java @@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache; import org.apache.hadoop.hbase.nio.ByteBuff; import org.apache.hadoop.hbase.util.ChecksumType; -import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; public class CacheTestUtils { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java index 49807a3749..d4f8a59d42 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java @@ -71,7 +71,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Tests {@link HFile} cache-on-write functionality for the following block diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java index 9253ce1681..d92453a03a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestLazyDataBlockDecompression.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.io.hfile; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java index 2bd750eb4e..4a4272f74e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/AbstractTestIPC.java @@ -62,8 +62,8 @@ import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.util.StringUtils; import org.junit.Test; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Some basic ipc tests. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java index e8f6464ff7..2c49a41fde 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestBufferChain.java @@ -34,8 +34,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.common.base.Charsets; -import com.google.common.io.Files; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Charsets; +import org.apache.hadoop.hbase.shaded.com.google.common.io.Files; @Category({RPCTests.class, SmallTests.class}) public class TestBufferChain { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java index f21359c865..3e8e25e6aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestProtoBufRpc.java @@ -47,7 +47,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Test for testing protocol buffer based RPC mechanism. This test depends on test.proto definition diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java index baf443e794..356c17267d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcClientLeaks.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.ipc; import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1; import static org.junit.Assert.assertTrue; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.net.Socket; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java index c12331fbf6..b2d795d24c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcHandlerException.java @@ -43,7 +43,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @RunWith(Parameterized.class) @Category({ RPCTests.class, SmallTests.class }) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java index fba5ca7937..16fc4da64e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java @@ -54,7 +54,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.junit.runners.Parameterized.Parameters; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @RunWith(Parameterized.class) @Category({ RPCTests.class, MediumTests.class }) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java index a1b33f769c..b4f93c5a9a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java @@ -30,10 +30,10 @@ import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import java.io.IOException; import java.net.InetSocketAddress; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java index 90ed73b136..36e45e4d18 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java @@ -47,7 +47,7 @@ import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; @Category({MapReduceTests.class, SmallTests.class}) public class TestGroupingTableMap { @@ -142,7 +142,8 @@ public class TestGroupingTableMap { new OutputCollector() { @Override public void collect(ImmutableBytesWritable arg, Result result) throws IOException { - assertArrayEquals(com.google.common.primitives.Bytes.concat(firstPartKeyValue, bSeparator, + assertArrayEquals(org.apache.hadoop.hbase.shaded.com.google.common.primitives. + Bytes.concat(firstPartKeyValue, bSeparator, secondPartKeyValue), arg.copyBytes()); outputCollected.set(true); } @@ -156,7 +157,8 @@ public class TestGroupingTableMap { final byte[] secondPartValue = Bytes.toBytes("4678456942345"); byte[][] data = { firstPartValue, secondPartValue }; ImmutableBytesWritable byteWritable = gTableMap.createGroupKey(data); - assertArrayEquals(com.google.common.primitives.Bytes.concat(firstPartValue, + assertArrayEquals(org.apache.hadoop.hbase.shaded.com.google.common.primitives. + Bytes.concat(firstPartValue, bSeparator, secondPartValue), byteWritable.get()); } finally { if (gTableMap != null) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java index 915a35d238..665c547019 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestMultiTableSnapshotInputFormat.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.mapred; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java index 6c7e4458b0..4ebd8bf4ab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestRowCounter.java @@ -43,7 +43,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; @Category({MapReduceTests.class, SmallTests.class}) public class TestRowCounter { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java index 22dda35784..ac2f20d895 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java @@ -54,8 +54,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; @Category({MapReduceTests.class, LargeTests.class}) public class TestTableMapReduceUtil { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java index 47421f141d..c717fa96a9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java index a7642af0b5..1f4efcd8f3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHashTable.java @@ -42,8 +42,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java index f569446664..3c38102185 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsvParser.java @@ -37,9 +37,9 @@ import org.apache.hadoop.hbase.util.Pair; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.base.Joiner; -import com.google.common.base.Splitter; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Splitter; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * Tests for {@link TsvParser}. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java index 10e83faac9..32ebbd2a19 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java @@ -77,7 +77,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java index 253e118340..32f511b236 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormat.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.base.Function; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Multimaps; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Function; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimaps; import edu.umd.cs.findbugs.annotations.Nullable; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java index b4b8056fe6..1c33848666 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableSnapshotInputFormatImpl.java @@ -18,10 +18,10 @@ package org.apache.hadoop.hbase.mapreduce; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -131,7 +131,8 @@ public class TestMultiTableSnapshotInputFormatImpl { @Override public String toString() { - return com.google.common.base.Objects.toStringHelper(this).add("startRow", startRow) + return org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects. + toStringHelper(this).add("startRow", startRow) .add("stopRow", stopRow).toString(); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java index 1bf851e025..79b2cf05c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSyncTable.java @@ -49,7 +49,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; /** * Basic test for the SyncTable M/R tool diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java index 96584c5d3f..5e630822b7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSnapshotInputFormat.java @@ -56,7 +56,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.util.Arrays; import org.apache.hadoop.fs.FileSystem; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java index d6210b9124..1c31acd0c8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMaster.java @@ -54,7 +54,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; import org.junit.rules.TestName; @Category({MasterTests.class, MediumTests.class}) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java index 23efdb21cb..ea57e1504d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java @@ -48,7 +48,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Tests for the master status page and its template. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java index f174438de1..7ae2bd8f61 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadBalancerPerformanceEvaluation.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.master.balancer; -import com.google.common.base.Preconditions; -import com.google.common.base.Stopwatch; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Stopwatch; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.logging.Log; @@ -41,6 +41,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; /** * Tool to test performance of different {@link org.apache.hadoop.hbase.master.LoadBalancer} @@ -153,21 +154,21 @@ public class LoadBalancerPerformanceEvaluation extends AbstractHBaseTool { String methodName = "roundRobinAssignment"; LOG.info("Calling " + methodName); - Stopwatch watch = new Stopwatch().start(); + Stopwatch watch = Stopwatch.createStarted(); loadBalancer.roundRobinAssignment(regions, servers); - System.out.print(formatResults(methodName, watch.elapsedMillis())); + System.out.print(formatResults(methodName, watch.elapsed(TimeUnit.MILLISECONDS))); methodName = "retainAssignment"; LOG.info("Calling " + methodName); watch.reset().start(); loadBalancer.retainAssignment(regionServerMap, servers); - System.out.print(formatResults(methodName, watch.elapsedMillis())); + System.out.print(formatResults(methodName, watch.elapsed(TimeUnit.MILLISECONDS))); methodName = "balanceCluster"; LOG.info("Calling " + methodName); watch.reset().start(); loadBalancer.balanceCluster(serverRegionMap); - System.out.print(formatResults(methodName, watch.elapsedMillis())); + System.out.print(formatResults(methodName, watch.elapsed(TimeUnit.MILLISECONDS))); return EXIT_SUCCESS; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadOnlyFavoredStochasticBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadOnlyFavoredStochasticBalancer.java index 276d65e8e8..3fa914d101 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadOnlyFavoredStochasticBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/LoadOnlyFavoredStochasticBalancer.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.master.balancer; import java.util.List; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Used for FavoredNode unit tests diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java index c43b94fb0f..400c9d937e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestBaseLoadBalancer.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.master.balancer; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java index a6ee897be3..3f39f98bb3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredNodeTableImport.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Threads; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.junit.After; import org.junit.Test; import org.junit.experimental.categories.Category; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java index ac199cb2c3..d86d62dff7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticBalancerPickers.java @@ -28,7 +28,7 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -55,7 +55,7 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestFavoredStochasticBalancerPickers extends BalancerTestBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java index 83c935552f..03285c679a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/balancer/TestFavoredStochasticLoadBalancer.java @@ -63,9 +63,9 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; @Category(MediumTests.class) public class TestFavoredStochasticLoadBalancer extends BalancerTestBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java index 3467f08d69..9521d47592 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java @@ -30,7 +30,7 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java index 6df05c0029..7497eb09f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestReplicationHFileCleaner.java @@ -17,7 +17,7 @@ import static org.junit.Assert.fail; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.lang.reflect.Field; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java index 8f88af7686..2fbbaeda12 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java @@ -68,7 +68,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Test the master-related aspects of a snapshot diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java index 76d4585f10..19e7137ae0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java @@ -27,8 +27,8 @@ import java.io.IOException; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java index 7bc500231f..37537e5432 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java @@ -25,7 +25,7 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import java.io.IOException; import java.util.Collections; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java index 710e63120a..b5a9f29a44 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java @@ -52,7 +52,7 @@ import org.mockito.InOrder; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Test Procedure coordinator operation. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java index f20c8a9c08..856e449dfd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java @@ -56,7 +56,7 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.mockito.verification.VerificationMode; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Cluster-wide testing of a distributed three-phase commit using a 'real' zookeeper cluster diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java index e7e2b23564..8b947ee234 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java @@ -48,7 +48,7 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.mockito.verification.VerificationMode; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Test zookeeper-based, procedure controllers diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java index c31bfe5080..bfd6a4b889 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/SpaceQuotaHelperForTests.java @@ -50,9 +50,9 @@ import org.apache.hadoop.hbase.regionserver.StoreFile; import org.apache.hadoop.hbase.util.Bytes; import org.junit.rules.TestName; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; @InterfaceAudience.Private public class SpaceQuotaHelperForTests { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java index 385f8c4d72..a673bcb654 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestNamespaceQuotaViolationStore.java @@ -16,7 +16,7 @@ */ package org.apache.hadoop.hbase.quotas; -import static com.google.common.collect.Iterables.size; +import static org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables.size; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.mockito.Matchers.any; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java index 52921af6c9..b9f54adfdb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaAdmin.java @@ -44,7 +44,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java index da294c6465..cbdb8ec71d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChore.java @@ -32,7 +32,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * Non-HBase cluster unit tests for {@link QuotaObserverChore}. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java index 4b0fa24791..736be8d0f4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaObserverChoreWithMiniCluster.java @@ -53,8 +53,8 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Iterables; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * Test class for {@link QuotaObserverChore} that uses a live HBase cluster. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java index 4022e3ffeb..27fc2a122c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSnapshotQuotaObserverChore.java @@ -58,9 +58,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Iterables; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.HashMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * Test class for the {@link SnapshotQuotaObserverChore}. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java index 85c7de237c..822026e64d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSpaceQuotasWithSnapshots.java @@ -52,7 +52,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; /** * Test class to exercise the inclusion of snapshots in space quotas diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java index 5a4969a4a6..fe7500d2c8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java @@ -16,7 +16,7 @@ */ package org.apache.hadoop.hbase.quotas; -import static com.google.common.collect.Iterables.size; +import static org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables.size; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.mockito.Matchers.any; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTablesWithQuotas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTablesWithQuotas.java index 3d0aec6e1b..2de649b347 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTablesWithQuotas.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTablesWithQuotas.java @@ -42,7 +42,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * Non-HBase cluster unit tests for {@link TablesWithQuotas}. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java index efbac63995..b33b45d0cf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/AbstractTestDateTieredCompactionPolicy.java @@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.regionserver; import static org.junit.Assert.assertEquals; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java index 9b96ff2547..023f426a20 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java @@ -361,4 +361,4 @@ public class RegionAsTable implements Table { @Override public int getReadRpcTimeout() { throw new UnsupportedOperationException(); } -} \ No newline at end of file +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java index 7fb7f21488..3f1613c8a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionArchiveIOException.java @@ -29,7 +29,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java index 543ca6ccb4..0e4c4f9ecf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionPolicy.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.regionserver; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java index 3b15ff3aba..91f500331b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultMemStore.java @@ -18,9 +18,9 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.base.Joiner; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java index 10510365df..ab60d9daa5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java @@ -70,8 +70,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Iterators; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterators; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import org.junit.rules.TestName; @Category(LargeTests.class) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 4f46c88ccd..d9853d99fe 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -40,8 +40,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java index 5467c3f6a6..03ba998d7e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java @@ -35,7 +35,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java index d640cbf63f..c17234ee90 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.io.InterruptedIOException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java index a28f8f1a73..2a1655db0b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldClient.java @@ -47,7 +47,7 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Tests bulk loading of HFiles with old non-secure client for backward compatibility. Will be diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java index 7070a801ee..d54d566176 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java @@ -68,9 +68,9 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.mockito.Mockito; -import com.google.common.base.Joiner; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java index e27d7c23e5..f171dd05f0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java @@ -44,10 +44,10 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.primitives.Ints; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.primitives.Ints; import org.junit.experimental.categories.Category; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java index 6e5cbf868a..031b56859b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestPerColumnFamilyFlush.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import com.google.common.hash.Hashing; +import org.apache.hadoop.hbase.shaded.com.google.common.hash.Hashing; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java index 97d13b1a3c..40f57a3b7c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java @@ -44,7 +44,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java index dcb2c86964..11c985d1c7 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMergeTransactionOnCluster.java @@ -86,7 +86,7 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.junit.rules.TestRule; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; @Category({RegionServerTests.class, MediumTests.class}) public class TestRegionMergeTransactionOnCluster { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java index 8d72aa217c..447595530c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java @@ -22,7 +22,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java index 2318414d98..5d36afd61c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java @@ -105,7 +105,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.TestName; import org.mockito.Mockito; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import java.util.concurrent.atomic.AtomicInteger; /** * Test class for the Store diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimestampFilterSeekHint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimestampFilterSeekHint.java index 58f0c56058..1d6393b681 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimestampFilterSeekHint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestTimestampFilterSeekHint.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; -import com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java index 96fee2459d..291f252ff5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.regionserver.compactions; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import com.google.common.base.Objects; +import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects; import java.util.LinkedList; import java.util.List; @@ -79,7 +79,7 @@ class MockStoreFileGenerator { when(mockSf.isReference()).thenReturn(false); // TODO come back to // this when selection takes this into account when(mockSf.getReader()).thenReturn(reader); - String toString = Objects.toStringHelper("MockStoreFile") + String toString = MoreObjects.toStringHelper("MockStoreFile") .add("isReference", false) .add("fileSize", StringUtils.humanReadableInt(sizeInBytes)) .add("seqId", seqId) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java index 74279acfd7..4f8d5b2a32 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestStripeCompactionPolicy.java @@ -36,8 +36,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.IOException; import java.util.ArrayList; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java index 72fc4b23f6..6b60ce160d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestAsyncProtobufLog.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver.wal; -import com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java index cc3e43b64d..a3c5526028 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java @@ -78,7 +78,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({ReplicationTests.class, LargeTests.class}) public class TestReplicationSmallTests extends TestReplicationBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java index c3b7eafdd2..7ea698c5b8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java @@ -158,7 +158,7 @@ public class TestReplicationSource { // completes } }; - replicationEndpoint.start(); + replicationEndpoint.startAsync(); ReplicationPeers mockPeers = Mockito.mock(ReplicationPeers.class); ReplicationPeer mockPeer = Mockito.mock(ReplicationPeer.class); Mockito.when(mockPeer.getPeerBandwidth()).thenReturn(0L); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java index 8ea0baed77..8e0349cfb3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationWALEntryFilters.java @@ -22,7 +22,7 @@ import static org.junit.Assert.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.util.HashMap; import java.util.HashSet; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java index 812e7312af..dba3119084 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpoint.java @@ -62,7 +62,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.junit.rules.TestName; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java index ad5063a31f..079fb6e57d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestRegionReplicaReplicationEndpointNoMaster.java @@ -72,7 +72,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; /** * Tests RegionReplicaReplicationEndpoint. Unlike TestRegionReplicaReplicationEndpoint this @@ -250,7 +250,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster { when(context.getMetrics()).thenReturn(mock(MetricsSource.class)); replicator.init(context); - replicator.start(); + replicator.startAsync(); //load some data to primary HTU.loadNumericRows(table, f, 0, 1000); @@ -287,7 +287,7 @@ public class TestRegionReplicaReplicationEndpointNoMaster { when(context.getReplicationPeer()).thenReturn(mockPeer); replicator.init(context); - replicator.start(); + replicator.startAsync(); // test the filter for the RE, not actual replication WALEntryFilter filter = replicator.getWALEntryfilter(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java index 72dcf5767b..e39561991d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSinkManager.java @@ -36,7 +36,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({ReplicationTests.class, SmallTests.class}) public class TestReplicationSinkManager { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java index e1b375661d..d55dd803e5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java @@ -96,7 +96,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Sets; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; /** * An abstract class that tests ReplicationSourceManager. Classes that extend this class should diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java index 237efe9579..07bb2b73e5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/HBaseKerberosUtils.java @@ -22,7 +22,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import com.google.common.base.Strings; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Strings; @InterfaceAudience.Private public class HBaseKerberosUtils { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java index a31b7b6537..e6c91d2179 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestSecureIPC.java @@ -28,7 +28,7 @@ import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertSame; import static org.junit.Assert.fail; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.io.File; import java.io.IOException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java index 709e79669c..1b4edd60e2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/TestUser.java @@ -34,7 +34,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ImmutableSet; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; import static org.junit.Assert.*; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java index 1d1a9361f5..e47cfd5da3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java @@ -64,8 +64,8 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import com.google.protobuf.BlockingRpcChannel; import com.google.protobuf.ServiceException; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java index bcf8670a27..a001169516 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java @@ -3174,4 +3174,4 @@ public class TestAccessController extends SecureTestUtil { verifyAllowed(regionLockHeartbeatAction, SUPERUSER, USER_ADMIN, namespaceUser, tableACUser); verifyDenied(regionLockHeartbeatAction, globalRWXUser, tableRWXUser); } -} \ No newline at end of file +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java index 102b28ae2c..155ce5772e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java @@ -64,7 +64,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({SecurityTests.class, LargeTests.class}) public class TestCellACLs extends SecureTestUtil { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java index 3c37d4b469..7889138615 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java @@ -53,7 +53,7 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import com.google.protobuf.BlockingRpcChannel; @Category({SecurityTests.class, MediumTests.class}) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java index 4d2cb0bfa3..6a67878329 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java @@ -57,8 +57,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; /** * Test the reading and writing of access permissions on {@code _acl_} table. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java index 040dfa6a97..dfc976a055 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java @@ -82,7 +82,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category({SecurityTests.class, LargeTests.class}) public class TestWithDisabledAuthorization extends SecureTestUtil { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java index 3ec0495c24..012236cb10 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java @@ -590,4 +590,4 @@ public class TestTokenAuthentication { return result; } } -} \ No newline at end of file +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java index c59196e753..8c07a54c91 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java @@ -27,7 +27,7 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.log4j.Appender; import org.apache.log4j.LogManager; import org.apache.log4j.spi.LoggingEvent; -import com.google.common.collect.Iterables; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Iterables; import org.junit.After; import org.junit.Before; import org.junit.Rule; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java index e4e0af565c..1d2e9a6b50 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.util; import static org.apache.hadoop.hbase.util.test.LoadTestDataGenerator.INCREMENT; import static org.apache.hadoop.hbase.util.test.LoadTestDataGenerator.MUTATE_INFO; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import java.io.IOException; import java.util.Arrays; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java index 564c000433..d372efba34 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationPr import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator; import org.apache.hadoop.util.StringUtils; -import com.google.common.base.Preconditions; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; /** Creates multiple threads that write key/values into the */ public class MultiThreadedUpdater extends MultiThreadedWriterBase { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java index a9ecf9ed0e..626b943a8e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestConfigurationUtil.java @@ -18,8 +18,8 @@ package org.apache.hadoop.hbase.util; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Before; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java index 7f891d87ce..58a3a75c0c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java @@ -56,7 +56,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; @Ignore // Until after HBASE-14614 goes in. @Category({MiscTests.class, LargeTests.class}) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java index 931830f3dd..02578ff264 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java @@ -34,8 +34,8 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.Test; -import com.google.common.collect.ComparisonChain; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ComparisonChain; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; import org.junit.experimental.categories.Category; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java index bdae0e513b..95ee8a8958 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java @@ -26,7 +26,7 @@ import java.util.Iterator; import java.util.List; import java.util.ListIterator; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java index c519809d79..f83e5a8f40 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.util.HBaseFsck; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildOverlap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildOverlap.java index 85e0560cde..9bf22129b6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildOverlap.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildOverlap.java @@ -36,7 +36,7 @@ import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Multimap; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Multimap; /** * This builds a table, builds an overlap, and then fails when attempting to diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java index 65401dea3b..196e70d370 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java @@ -45,7 +45,7 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import com.google.protobuf.ServiceException; @Category({RegionServerTests.class, MediumTests.class}) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java index 611f8c3145..ed71fa5087 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java @@ -23,9 +23,9 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableMap; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/hbase-shell/pom.xml b/hbase-shell/pom.xml index d9347d255c..c3623a2058 100644 --- a/hbase-shell/pom.xml +++ b/hbase-shell/pom.xml @@ -318,35 +318,77 @@ org.apache.hadoop hadoop-auth + + + com.google.guava + guava + + org.apache.hadoop hadoop-client + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-core + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-jobclient test-jar test + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs test-jar test + + + com.google.guava + guava + + org.apache.hadoop hadoop-minicluster test + + + com.google.guava + guava + + org.apache.hadoop @@ -400,6 +442,12 @@ org.apache.hadoop hadoop-minicluster + + + com.google.guava + guava + + org.apache.hadoop diff --git a/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java b/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java index 8651b8dc0f..d776a54ad9 100644 --- a/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java +++ b/hbase-shell/src/test/rsgroup/org/apache/hadoop/hbase/client/rsgroup/TestShellRSGroups.java @@ -102,6 +102,4 @@ public class TestShellRSGroups { System.clearProperty("shell.test"); } } - -} - +} \ No newline at end of file diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml index b57e59a921..ef8e1f536d 100644 --- a/hbase-spark/pom.xml +++ b/hbase-spark/pom.xml @@ -44,6 +44,10 @@ + + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + javax.servlet @@ -69,6 +73,10 @@ provided + com.google.guava + guava + + org.scala-lang scala-library @@ -137,6 +145,10 @@ ${hadoop-two.version} + com.google.guava + guava + + log4j log4j @@ -245,6 +257,10 @@ test + com.google.guava + guava + + log4j log4j @@ -520,6 +536,12 @@ hadoop-mapreduce-client-jobclient test-jar test + + + com.google.guava + guava + + org.apache.avro diff --git a/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java b/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java index c3f1bcbedc..93cd939c10 100644 --- a/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java +++ b/hbase-spark/src/test/java/org/apache/hadoop/hbase/spark/TestJavaHBaseContext.java @@ -62,7 +62,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; import scala.Tuple2; -import com.google.common.io.Files; +import org.apache.hadoop.hbase.shaded.com.google.common.io.Files; @Category({MiscTests.class, MediumTests.class}) public class TestJavaHBaseContext implements Serializable { diff --git a/hbase-testing-util/pom.xml b/hbase-testing-util/pom.xml index 66417aad94..9b8733f301 100644 --- a/hbase-testing-util/pom.xml +++ b/hbase-testing-util/pom.xml @@ -150,32 +150,68 @@ org.apache.hadoop hadoop-client compile + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-core compile + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-jobclient compile + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs compile + + + com.google.guava + guava + + org.apache.hadoop hadoop-hdfs test-jar compile + + + com.google.guava + guava + + org.apache.hadoop hadoop-minicluster compile + + + com.google.guava + guava + + org.apache.hadoop diff --git a/hbase-thrift/pom.xml b/hbase-thrift/pom.xml index b6253869bf..821806347d 100644 --- a/hbase-thrift/pom.xml +++ b/hbase-thrift/pom.xml @@ -221,8 +221,8 @@ hbase-client - com.google.guava - guava + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous com.google.protobuf @@ -462,16 +462,34 @@ org.apache.hadoop hadoop-client + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-core + + + com.google.guava + guava + + org.apache.hadoop hadoop-mapreduce-client-jobclient test-jar test + + + com.google.guava + guava + + org.apache.hadoop @@ -481,6 +499,12 @@ org.apache.hadoop hadoop-minicluster test + + + com.google.guava + guava + + diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java index b01bacfc2f..d67bc16487 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java @@ -41,7 +41,7 @@ import org.apache.thrift.transport.TSocket; import org.apache.thrift.transport.TTransport; import org.apache.thrift.transport.TTransportException; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * A bounded thread pool server customized for HBase. diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java index 6a074fdac6..476d3b1ea3 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java @@ -127,9 +127,9 @@ import org.apache.thrift.transport.TTransportFactory; import org.eclipse.jetty.http.HttpVersion; import org.eclipse.jetty.server.*; -import com.google.common.base.Joiner; -import com.google.common.base.Throwables; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Throwables; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.ssl.SslContextFactory; diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java index 560ae64918..2899a9b811 100644 --- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java +++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java @@ -90,7 +90,7 @@ import org.apache.thrift.transport.TServerTransport; import org.apache.thrift.transport.TTransportException; import org.apache.thrift.transport.TTransportFactory; -import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * ThriftServer - this class starts up a Thrift server which implements the HBase API specified in the diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java index c04b36f0b5..70cb66af7f 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java @@ -43,7 +43,7 @@ import static org.junit.Assert.assertFalse; import org.junit.Rule; import org.junit.rules.ExpectedException; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; /** * Start the HBase Thrift HTTP server on a random port through the command-line diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java index 87998dacc6..4d52576cce 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java @@ -51,7 +51,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; -import com.google.common.base.Joiner; +import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner; /** * Start the HBase Thrift server on a random port through the command-line diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java index c3f59f61cc..3f5c388503 100644 --- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java +++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java @@ -18,7 +18,7 @@ */ package org.apache.hadoop.hbase.thrift2; -import com.google.common.collect.Lists; +import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; diff --git a/pom.xml b/pom.xml index c64033cf3a..ad84041c47 100644 --- a/pom.xml +++ b/pom.xml @@ -1329,6 +1329,7 @@ 3.0.4 ${compileSource} + 1.0.0-SNAPSHOT 2.7.1 3.0.0-alpha2 @@ -1353,7 +1354,6 @@ 4.5.3 4.4.6 3.2.1 - 12.0.1 2.23.2 2.2.12 9.3.8.v20160314 @@ -1681,15 +1681,9 @@ ${metrics-core.version} - com.google.guava - guava - ${guava.version} - - - com.google.code.findbugs - jsr305 - - + org.apache.hbase.thirdparty + hbase-shaded-miscellaneous + ${hbase-thirdparty.version} commons-collections -- 2.11.0 (Apple Git-81)