From 3db5645aae3b36287e63cf206925d9ee37821ba1 Mon Sep 17 00:00:00 2001
From: zhangduo
Date: Tue, 4 Apr 2017 20:30:10 +0800
Subject: [PATCH] HBASE-17857 Remove IS annotations from IA.Public classes
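
For classes and interfaces annotated @InterfaceAudience.Public, the
@InterfaceStability annotation is removed; the audience annotation alone is
kept. A minimal before/after sketch of the pattern applied throughout
(CallDroppedException is used here only as a representative example taken
from the diff below; imports are as in that file):

    // before
    @InterfaceAudience.Public
    @InterfaceStability.Evolving
    public class CallDroppedException extends IOException {
      public CallDroppedException() {
        super();
      }
    }

    // after
    @InterfaceAudience.Public
    public class CallDroppedException extends IOException {
      public CallDroppedException() {
        super();
      }
    }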
---
.../hbase/classification/InterfaceAudience.java | 1 -
.../hbase/classification/InterfaceStability.java | 1 -
.../apache/hadoop/hbase/CallDroppedException.java | 4 +-
.../hadoop/hbase/CallQueueTooBigException.java | 4 +-
.../hadoop/hbase/ClockOutOfSyncException.java | 2 -
.../org/apache/hadoop/hbase/ClusterStatus.java | 2 -
.../apache/hadoop/hbase/DoNotRetryIOException.java | 2 -
.../hadoop/hbase/DroppedSnapshotException.java | 2 -
.../org/apache/hadoop/hbase/HColumnDescriptor.java | 2 -
.../java/org/apache/hadoop/hbase/HRegionInfo.java | 2 -
.../org/apache/hadoop/hbase/HRegionLocation.java | 2 -
.../org/apache/hadoop/hbase/HTableDescriptor.java | 2 -
.../hbase/InvalidFamilyOperationException.java | 2 -
.../org/apache/hadoop/hbase/KeepDeletedCells.java | 2 -
.../hadoop/hbase/MasterNotRunningException.java | 2 -
.../hadoop/hbase/MemoryCompactionPolicy.java | 2 -
.../hadoop/hbase/MultiActionResultTooLarge.java | 2 -
.../hadoop/hbase/NamespaceExistException.java | 2 -
.../hadoop/hbase/NamespaceNotFoundException.java | 2 -
.../hbase/NotAllMetaRegionsOnlineException.java | 2 -
.../hadoop/hbase/NotServingRegionException.java | 2 -
.../apache/hadoop/hbase/PleaseHoldException.java | 2 -
.../org/apache/hadoop/hbase/RegionException.java | 2 -
.../java/org/apache/hadoop/hbase/RegionLoad.java | 2 -
.../hadoop/hbase/RegionTooBusyException.java | 2 -
.../hbase/ReplicationPeerNotFoundException.java | 4 +-
.../hadoop/hbase/RetryImmediatelyException.java | 2 -
.../java/org/apache/hadoop/hbase/ServerLoad.java | 2 -
.../apache/hadoop/hbase/TableExistsException.java | 2 -
.../hadoop/hbase/TableInfoMissingException.java | 2 -
.../hadoop/hbase/TableNotDisabledException.java | 2 -
.../hadoop/hbase/TableNotEnabledException.java | 2 -
.../hadoop/hbase/TableNotFoundException.java | 2 -
.../hadoop/hbase/UnknownRegionException.java | 2 -
.../hadoop/hbase/UnknownScannerException.java | 2 -
.../hadoop/hbase/ZooKeeperConnectionException.java | 2 -
.../java/org/apache/hadoop/hbase/client/Admin.java | 2 -
.../org/apache/hadoop/hbase/client/Append.java | 2 -
.../org/apache/hadoop/hbase/client/AsyncAdmin.java | 2 -
.../hadoop/hbase/client/AsyncConnection.java | 4 +-
.../org/apache/hadoop/hbase/client/AsyncTable.java | 2 -
.../apache/hadoop/hbase/client/AsyncTableBase.java | 2 -
.../hadoop/hbase/client/AsyncTableBuilder.java | 2 -
.../hbase/client/AsyncTableRegionLocator.java | 2 -
.../org/apache/hadoop/hbase/client/Attributes.java | 2 -
.../hadoop/hbase/client/BufferedMutator.java | 3 -
.../hadoop/hbase/client/BufferedMutatorParams.java | 2 -
.../apache/hadoop/hbase/client/CompactType.java | 4 +-
.../hadoop/hbase/client/CompactionState.java | 2 -
.../org/apache/hadoop/hbase/client/Connection.java | 2 -
.../hadoop/hbase/client/ConnectionFactory.java | 2 -
.../apache/hadoop/hbase/client/Consistency.java | 2 -
.../org/apache/hadoop/hbase/client/Delete.java | 2 -
.../hbase/client/DoNotRetryRegionException.java | 2 -
.../org/apache/hadoop/hbase/client/Durability.java | 2 -
.../java/org/apache/hadoop/hbase/client/Get.java | 2 -
.../hadoop/hbase/client/HTableMultiplexer.java | 3 -
.../org/apache/hadoop/hbase/client/Increment.java | 2 -
.../apache/hadoop/hbase/client/IsolationLevel.java | 2 -
.../hadoop/hbase/client/MasterSwitchType.java | 4 +-
.../hbase/client/MobCompactPartitionPolicy.java | 2 -
.../org/apache/hadoop/hbase/client/Mutation.java | 2 -
.../hbase/client/NoServerForRegionException.java | 2 -
.../org/apache/hadoop/hbase/client/Operation.java | 2 -
.../hbase/client/OperationWithAttributes.java | 2 -
.../java/org/apache/hadoop/hbase/client/Put.java | 2 -
.../java/org/apache/hadoop/hbase/client/Query.java | 4 +-
.../apache/hadoop/hbase/client/RawAsyncTable.java | 4 -
.../hadoop/hbase/client/RawScanResultConsumer.java | 6 +-
.../hadoop/hbase/client/RegionLoadStats.java | 2 -
.../apache/hadoop/hbase/client/RegionLocator.java | 2 -
.../hbase/client/RegionOfflineException.java | 2 -
.../hadoop/hbase/client/RequestController.java | 4 -
.../hbase/client/RequestControllerFactory.java | 2 -
.../org/apache/hadoop/hbase/client/Result.java | 2 -
.../apache/hadoop/hbase/client/ResultScanner.java | 2 -
.../hbase/client/RetriesExhaustedException.java | 2 -
.../RetriesExhaustedWithDetailsException.java | 2 -
.../java/org/apache/hadoop/hbase/client/Row.java | 2 -
.../org/apache/hadoop/hbase/client/RowAccess.java | 2 -
.../apache/hadoop/hbase/client/RowMutations.java | 2 -
.../hadoop/hbase/client/RowTooBigException.java | 2 -
.../hadoop/hbase/client/RpcRetryingCaller.java | 4 +-
.../java/org/apache/hadoop/hbase/client/Scan.java | 3 -
.../hadoop/hbase/client/ScanResultConsumer.java | 2 -
.../hbase/client/ShortCircuitMasterConnection.java | 2 -
.../hadoop/hbase/client/SnapshotDescription.java | 2 -
.../apache/hadoop/hbase/client/SnapshotType.java | 4 +-
.../hbase/client/SyncCoprocessorRpcChannel.java | 4 +-
.../java/org/apache/hadoop/hbase/client/Table.java | 2 -
.../apache/hadoop/hbase/client/TableBuilder.java | 2 -
.../hbase/client/UnmodifyableHRegionInfo.java | 2 -
.../hbase/client/UnmodifyableHTableDescriptor.java | 2 -
.../hadoop/hbase/client/WrongRowIOException.java | 2 -
.../hbase/client/backoff/ClientBackoffPolicy.java | 4 +-
.../backoff/ExponentialClientBackoffPolicy.java | 4 +-
.../hadoop/hbase/client/coprocessor/Batch.java | 4 -
.../hadoop/hbase/client/metrics/ScanMetrics.java | 2 -
.../client/metrics/ServerSideScanMetrics.java | 2 -
.../hbase/client/replication/ReplicationAdmin.java | 2 -
.../hadoop/hbase/client/replication/TableCFs.java | 4 +-
.../hbase/client/security/SecurityCapability.java | 2 -
.../coprocessor/BypassCoprocessorException.java | 4 +-
.../hbase/coprocessor/CoprocessorException.java | 2 -
.../exceptions/ConnectionClosingException.java | 2 -
.../exceptions/FailedSanityCheckException.java | 2 -
.../hbase/exceptions/MergeRegionException.java | 2 -
.../exceptions/PreemptiveFastFailException.java | 4 +-
.../exceptions/RegionInRecoveryException.java | 2 -
.../hbase/exceptions/RequestTooBigException.java | 2 -
.../hbase/exceptions/ScannerResetException.java | 2 -
.../hbase/exceptions/UnknownProtocolException.java | 2 -
.../hadoop/hbase/filter/BinaryComparator.java | 2 -
.../hbase/filter/BinaryPrefixComparator.java | 2 -
.../apache/hadoop/hbase/filter/BitComparator.java | 3 -
.../hadoop/hbase/filter/ColumnCountGetFilter.java | 2 -
.../hbase/filter/ColumnPaginationFilter.java | 2 -
.../hadoop/hbase/filter/ColumnPrefixFilter.java | 2 -
.../hadoop/hbase/filter/ColumnRangeFilter.java | 2 -
.../apache/hadoop/hbase/filter/CompareFilter.java | 3 -
.../hadoop/hbase/filter/DependentColumnFilter.java | 2 -
.../apache/hadoop/hbase/filter/FamilyFilter.java | 2 -
.../org/apache/hadoop/hbase/filter/Filter.java | 3 -
.../org/apache/hadoop/hbase/filter/FilterList.java | 3 -
.../hadoop/hbase/filter/FirstKeyOnlyFilter.java | 2 -
.../FirstKeyValueMatchingQualifiersFilter.java | 2 -
.../apache/hadoop/hbase/filter/FuzzyRowFilter.java | 2 -
.../hadoop/hbase/filter/InclusiveStopFilter.java | 2 -
.../hbase/filter/IncompatibleFilterException.java | 2 -
.../hbase/filter/InvalidRowFilterException.java | 2 -
.../apache/hadoop/hbase/filter/KeyOnlyFilter.java | 2 -
.../apache/hadoop/hbase/filter/LongComparator.java | 2 -
.../hadoop/hbase/filter/MultiRowRangeFilter.java | 3 -
.../hbase/filter/MultipleColumnPrefixFilter.java | 2 -
.../apache/hadoop/hbase/filter/NullComparator.java | 2 -
.../org/apache/hadoop/hbase/filter/PageFilter.java | 2 -
.../apache/hadoop/hbase/filter/ParseConstants.java | 2 -
.../apache/hadoop/hbase/filter/ParseFilter.java | 2 -
.../apache/hadoop/hbase/filter/PrefixFilter.java | 2 -
.../hadoop/hbase/filter/QualifierFilter.java | 2 -
.../hadoop/hbase/filter/RandomRowFilter.java | 2 -
.../hadoop/hbase/filter/RegexStringComparator.java | 3 -
.../org/apache/hadoop/hbase/filter/RowFilter.java | 2 -
.../filter/SingleColumnValueExcludeFilter.java | 2 -
.../hbase/filter/SingleColumnValueFilter.java | 2 -
.../org/apache/hadoop/hbase/filter/SkipFilter.java | 2 -
.../hadoop/hbase/filter/SubstringComparator.java | 2 -
.../hadoop/hbase/filter/TimestampsFilter.java | 2 -
.../apache/hadoop/hbase/filter/ValueFilter.java | 2 -
.../hadoop/hbase/filter/WhileMatchFilter.java | 2 -
.../apache/hadoop/hbase/ipc/BadAuthException.java | 2 -
.../hadoop/hbase/ipc/CallCancelledException.java | 2 -
.../hadoop/hbase/ipc/CallTimeoutException.java | 2 -
.../hbase/ipc/CallerDisconnectedException.java | 2 -
.../hbase/ipc/CellScannerButNoCodecException.java | 4 +-
.../hadoop/hbase/ipc/CoprocessorRpcChannel.java | 4 +-
.../hadoop/hbase/ipc/FailedServerException.java | 2 -
.../hbase/ipc/FallbackDisallowedException.java | 4 +-
.../hadoop/hbase/ipc/FatalConnectionException.java | 2 -
.../hbase/ipc/NettyRpcClientConfigHelper.java | 2 -
.../hbase/ipc/RemoteWithExtrasException.java | 2 -
.../hbase/ipc/ServerNotRunningYetException.java | 2 -
.../hadoop/hbase/ipc/ServerTooBusyException.java | 4 +-
.../hbase/ipc/StoppedRpcClientException.java | 2 -
.../hbase/ipc/UnsupportedCellCodecException.java | 2 -
.../ipc/UnsupportedCompressionCodecException.java | 2 -
.../hbase/ipc/UnsupportedCryptoException.java | 2 -
.../hadoop/hbase/ipc/WrongVersionException.java | 2 -
.../hbase/quotas/QuotaExceededException.java | 2 -
.../apache/hadoop/hbase/quotas/QuotaFilter.java | 2 -
.../apache/hadoop/hbase/quotas/QuotaRetriever.java | 4 +-
.../org/apache/hadoop/hbase/quotas/QuotaScope.java | 2 -
.../apache/hadoop/hbase/quotas/QuotaSettings.java | 2 -
.../hadoop/hbase/quotas/QuotaSettingsFactory.java | 2 -
.../org/apache/hadoop/hbase/quotas/QuotaType.java | 2 -
.../apache/hadoop/hbase/quotas/ThrottleType.java | 2 -
.../hadoop/hbase/quotas/ThrottlingException.java | 3 -
.../hadoop/hbase/regionserver/BloomType.java | 2 -
.../hadoop/hbase/regionserver/LeaseException.java | 2 -
.../regionserver/NoSuchColumnFamilyException.java | 2 -
.../regionserver/RegionServerAbortedException.java | 2 -
.../regionserver/RegionServerRunningException.java | 2 -
.../regionserver/RegionServerStoppedException.java | 2 -
.../hbase/regionserver/WrongRegionException.java | 2 -
.../regionserver/wal/FailedLogCloseException.java | 2 -
.../wal/FailedSyncBeforeLogCloseException.java | 2 -
.../hbase/replication/ReplicationException.java | 2 -
.../hbase/replication/ReplicationPeerConfig.java | 2 -
.../replication/ReplicationPeerDescription.java | 4 +-
.../hbase/security/AccessDeniedException.java | 2 -
.../hbase/security/access/AccessControlClient.java | 4 +-
.../security/access/AccessControlConstants.java | 2 -
.../hadoop/hbase/security/access/Permission.java | 3 -
.../hbase/security/visibility/Authorizations.java | 2 -
.../hbase/security/visibility/CellVisibility.java | 2 -
.../security/visibility/InvalidLabelException.java | 2 -
.../visibility/LabelAlreadyExistsException.java | 2 -
.../security/visibility/VisibilityClient.java | 2 -
.../VisibilityControllerNotReadyException.java | 2 -
.../hbase/snapshot/CorruptedSnapshotException.java | 2 -
.../hbase/snapshot/ExportSnapshotException.java | 2 -
.../hbase/snapshot/HBaseSnapshotException.java | 2 -
.../hbase/snapshot/RestoreSnapshotException.java | 2 -
.../hbase/snapshot/SnapshotCreationException.java | 2 -
.../snapshot/SnapshotDoesNotExistException.java | 2 -
.../hbase/snapshot/SnapshotExistsException.java | 2 -
.../snapshot/TablePartiallyOpenException.java | 2 -
.../hbase/snapshot/UnknownSnapshotException.java | 2 -
.../hbase/util/FileSystemVersionException.java | 2 -
.../org/apache/hadoop/hbase/util/JsonMapper.java | 2 -
.../hbase/TestInterfaceAudienceAnnotations.java | 144 +++++++++++++++------
.../java/org/apache/hadoop/hbase/AuthUtil.java | 2 -
.../main/java/org/apache/hadoop/hbase/Cell.java | 2 -
.../java/org/apache/hadoop/hbase/CellUtil.java | 2 -
.../java/org/apache/hadoop/hbase/ChoreService.java | 2 -
.../apache/hadoop/hbase/HBaseConfiguration.java | 2 -
.../org/apache/hadoop/hbase/HBaseIOException.java | 4 +-
.../hadoop/hbase/HBaseInterfaceAudience.java | 2 -
.../java/org/apache/hadoop/hbase/HConstants.java | 2 -
.../apache/hadoop/hbase/NamespaceDescriptor.java | 3 -
.../org/apache/hadoop/hbase/ProcedureInfo.java | 4 +-
.../org/apache/hadoop/hbase/ProcedureState.java | 2 -
.../org/apache/hadoop/hbase/ScheduledChore.java | 2 -
.../java/org/apache/hadoop/hbase/ServerName.java | 2 -
.../java/org/apache/hadoop/hbase/Stoppable.java | 2 -
.../java/org/apache/hadoop/hbase/TableName.java | 2 -
.../hadoop/hbase/filter/ByteArrayComparable.java | 2 -
.../hadoop/hbase/io/ByteBufferOutputStream.java | 2 -
.../hadoop/hbase/io/ImmutableBytesWritable.java | 3 -
.../java/org/apache/hadoop/hbase/io/TimeRange.java | 2 -
.../hadoop/hbase/io/compress/Compression.java | 2 -
.../org/apache/hadoop/hbase/io/crypto/Cipher.java | 2 -
.../hadoop/hbase/io/crypto/CipherProvider.java | 2 -
.../org/apache/hadoop/hbase/io/crypto/Context.java | 2 -
.../hbase/io/crypto/CryptoCipherProvider.java | 2 -
.../apache/hadoop/hbase/io/crypto/Decryptor.java | 2 -
.../hbase/io/crypto/DefaultCipherProvider.java | 2 -
.../apache/hadoop/hbase/io/crypto/Encryption.java | 3 -
.../apache/hadoop/hbase/io/crypto/Encryptor.java | 2 -
.../apache/hadoop/hbase/io/crypto/KeyProvider.java | 2 -
.../hbase/io/crypto/KeyStoreKeyProvider.java | 2 -
.../hbase/io/encoding/DataBlockEncoding.java | 2 -
.../java/org/apache/hadoop/hbase/net/Address.java | 2 -
.../apache/hadoop/hbase/rsgroup/RSGroupInfo.java | 2 -
.../org/apache/hadoop/hbase/security/User.java | 2 -
.../org/apache/hadoop/hbase/types/DataType.java | 2 -
.../hadoop/hbase/types/FixedLengthWrapper.java | 2 -
.../org/apache/hadoop/hbase/types/OrderedBlob.java | 2 -
.../apache/hadoop/hbase/types/OrderedBlobVar.java | 2 -
.../hadoop/hbase/types/OrderedBytesBase.java | 2 -
.../apache/hadoop/hbase/types/OrderedFloat32.java | 2 -
.../apache/hadoop/hbase/types/OrderedFloat64.java | 2 -
.../apache/hadoop/hbase/types/OrderedInt16.java | 2 -
.../apache/hadoop/hbase/types/OrderedInt32.java | 2 -
.../apache/hadoop/hbase/types/OrderedInt64.java | 2 -
.../org/apache/hadoop/hbase/types/OrderedInt8.java | 2 -
.../apache/hadoop/hbase/types/OrderedNumeric.java | 2 -
.../apache/hadoop/hbase/types/OrderedString.java | 2 -
.../java/org/apache/hadoop/hbase/types/PBType.java | 2 -
.../org/apache/hadoop/hbase/types/RawByte.java | 2 -
.../org/apache/hadoop/hbase/types/RawBytes.java | 2 -
.../hadoop/hbase/types/RawBytesFixedLength.java | 2 -
.../hadoop/hbase/types/RawBytesTerminated.java | 2 -
.../org/apache/hadoop/hbase/types/RawDouble.java | 2 -
.../org/apache/hadoop/hbase/types/RawFloat.java | 2 -
.../org/apache/hadoop/hbase/types/RawInteger.java | 2 -
.../org/apache/hadoop/hbase/types/RawLong.java | 2 -
.../org/apache/hadoop/hbase/types/RawShort.java | 2 -
.../org/apache/hadoop/hbase/types/RawString.java | 2 -
.../hadoop/hbase/types/RawStringFixedLength.java | 2 -
.../hadoop/hbase/types/RawStringTerminated.java | 2 -
.../java/org/apache/hadoop/hbase/types/Struct.java | 2 -
.../apache/hadoop/hbase/types/StructBuilder.java | 2 -
.../apache/hadoop/hbase/types/StructIterator.java | 2 -
.../hadoop/hbase/types/TerminatedWrapper.java | 2 -
.../java/org/apache/hadoop/hbase/types/Union2.java | 2 -
.../java/org/apache/hadoop/hbase/types/Union3.java | 2 -
.../java/org/apache/hadoop/hbase/types/Union4.java | 2 -
.../java/org/apache/hadoop/hbase/util/Base64.java | 5 -
.../apache/hadoop/hbase/util/ByteBufferUtils.java | 2 -
.../org/apache/hadoop/hbase/util/ByteRange.java | 2 -
.../apache/hadoop/hbase/util/ByteRangeUtils.java | 2 -
.../java/org/apache/hadoop/hbase/util/Bytes.java | 4 -
.../java/org/apache/hadoop/hbase/util/Counter.java | 2 -
.../java/org/apache/hadoop/hbase/util/MD5Hash.java | 2 -
.../java/org/apache/hadoop/hbase/util/Order.java | 2 -
.../org/apache/hadoop/hbase/util/OrderedBytes.java | 2 -
.../java/org/apache/hadoop/hbase/util/Pair.java | 2 -
.../apache/hadoop/hbase/util/PairOfSameType.java | 2 -
.../hadoop/hbase/util/PositionedByteRange.java | 2 -
.../hbase/util/ReadOnlyByteRangeException.java | 2 -
.../apache/hadoop/hbase/util/SimpleByteRange.java | 2 -
.../hadoop/hbase/util/SimpleMutableByteRange.java | 2 -
.../hbase/util/SimplePositionedByteRange.java | 2 -
.../util/SimplePositionedMutableByteRange.java | 2 -
.../org/apache/hadoop/hbase/util/VersionInfo.java | 2 -
.../hadoop/hbase/HBaseCommonTestingUtility.java | 2 -
.../client/coprocessor/AggregationClient.java | 2 +-
.../client/coprocessor/AsyncAggregationClient.java | 2 -
.../org/apache/hadoop/hbase/rest/Constants.java | 2 -
.../apache/hadoop/hbase/rest/client/Client.java | 2 -
.../apache/hadoop/hbase/rest/client/Cluster.java | 2 -
.../hadoop/hbase/rest/client/RemoteAdmin.java | 2 -
.../hadoop/hbase/rest/client/RemoteHTable.java | 2 -
.../apache/hadoop/hbase/rest/client/Response.java | 2 -
.../rest/filter/RestCsrfPreventionFilter.java | 1 -
.../org/apache/hadoop/hbase/LocalHBaseCluster.java | 2 -
.../hadoop/hbase/client/TableSnapshotScanner.java | 2 -
.../hadoop/hbase/client/locking/EntityLock.java | 2 -
.../hbase/errorhandling/ForeignException.java | 2 -
.../hbase/errorhandling/TimeoutException.java | 2 -
.../hadoop/hbase/mapred/GroupingTableMap.java | 2 -
.../hadoop/hbase/mapred/HRegionPartitioner.java | 4 +-
.../hadoop/hbase/mapred/IdentityTableMap.java | 2 -
.../hadoop/hbase/mapred/IdentityTableReduce.java | 2 -
.../mapred/MultiTableSnapshotInputFormat.java | 2 -
.../org/apache/hadoop/hbase/mapred/RowCounter.java | 2 -
.../hadoop/hbase/mapred/TableInputFormat.java | 2 -
.../hadoop/hbase/mapred/TableInputFormatBase.java | 2 -
.../org/apache/hadoop/hbase/mapred/TableMap.java | 2 -
.../hadoop/hbase/mapred/TableMapReduceUtil.java | 2 -
.../hadoop/hbase/mapred/TableOutputFormat.java | 2 -
.../hadoop/hbase/mapred/TableRecordReader.java | 2 -
.../hadoop/hbase/mapred/TableRecordReaderImpl.java | 2 -
.../apache/hadoop/hbase/mapred/TableReduce.java | 2 -
.../hbase/mapred/TableSnapshotInputFormat.java | 2 -
.../org/apache/hadoop/hbase/mapred/TableSplit.java | 2 -
.../apache/hadoop/hbase/mapreduce/CellCounter.java | 2 -
.../apache/hadoop/hbase/mapreduce/CellCreator.java | 2 -
.../apache/hadoop/hbase/mapreduce/CopyTable.java | 2 -
.../org/apache/hadoop/hbase/mapreduce/Export.java | 2 -
.../hbase/mapreduce/GroupingTableMapper.java | 2 -
.../hadoop/hbase/mapreduce/HFileOutputFormat2.java | 2 -
.../hadoop/hbase/mapreduce/HRegionPartitioner.java | 2 -
.../hbase/mapreduce/IdentityTableMapper.java | 2 -
.../hbase/mapreduce/IdentityTableReducer.java | 2 -
.../org/apache/hadoop/hbase/mapreduce/Import.java | 2 -
.../apache/hadoop/hbase/mapreduce/ImportTsv.java | 2 -
.../hbase/mapreduce/KeyValueSerialization.java | 2 -
.../hbase/mapreduce/KeyValueSortReducer.java | 2 -
.../hbase/mapreduce/LoadIncrementalHFiles.java | 2 -
.../hbase/mapreduce/MultiHFileOutputFormat.java | 2 -
.../hbase/mapreduce/MultiTableInputFormat.java | 2 -
.../hbase/mapreduce/MultiTableInputFormatBase.java | 2 -
.../hbase/mapreduce/MultiTableOutputFormat.java | 2 -
.../mapreduce/MultiTableSnapshotInputFormat.java | 2 -
.../hbase/mapreduce/MutationSerialization.java | 2 -
.../apache/hadoop/hbase/mapreduce/PutCombiner.java | 2 -
.../hadoop/hbase/mapreduce/PutSortReducer.java | 2 -
.../hbase/mapreduce/ResultSerialization.java | 2 -
.../apache/hadoop/hbase/mapreduce/RowCounter.java | 2 -
.../mapreduce/SimpleTotalOrderPartitioner.java | 2 -
.../hadoop/hbase/mapreduce/TableInputFormat.java | 2 -
.../hbase/mapreduce/TableInputFormatBase.java | 2 -
.../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 2 -
.../apache/hadoop/hbase/mapreduce/TableMapper.java | 4 +-
.../hbase/mapreduce/TableOutputCommitter.java | 2 -
.../hadoop/hbase/mapreduce/TableOutputFormat.java | 2 -
.../hadoop/hbase/mapreduce/TableRecordReader.java | 2 -
.../hbase/mapreduce/TableRecordReaderImpl.java | 2 -
.../hadoop/hbase/mapreduce/TableReducer.java | 2 -
.../hbase/mapreduce/TableSnapshotInputFormat.java | 2 -
.../apache/hadoop/hbase/mapreduce/TableSplit.java | 2 -
.../hadoop/hbase/mapreduce/TextSortReducer.java | 2 -
.../hadoop/hbase/mapreduce/TsvImporterMapper.java | 2 -
.../hbase/mapreduce/TsvImporterTextMapper.java | 2 -
.../mapreduce/VisibilityExpressionResolver.java | 2 -
.../apache/hadoop/hbase/mapreduce/WALPlayer.java | 2 -
.../org/apache/hadoop/hbase/mob/MobConstants.java | 2 -
.../hbase/regionserver/RowTooBigException.java | 2 +-
.../hadoop/hbase/security/token/TokenUtil.java | 2 -
.../security/visibility/ScanLabelGenerator.java | 2 -
.../visibility/VisibilityExpEvaluator.java | 2 -
.../visibility/VisibilityLabelService.java | 2 -
.../hadoop/hbase/snapshot/ExportSnapshot.java | 2 -
.../apache/hadoop/hbase/snapshot/SnapshotInfo.java | 2 -
.../hadoop/hbase/util/ConfigurationUtil.java | 2 -
.../apache/hadoop/hbase/util/EncryptionTest.java | 2 -
.../hbase/util/LeaseNotRecoveredException.java | 2 -
.../hbase/zookeeper/MiniZooKeeperCluster.java | 2 -
.../apache/hadoop/hbase/HBaseTestingUtility.java | 2 -
.../org/apache/hadoop/hbase/MiniHBaseCluster.java | 2 -
.../hadoop/hbase/codec/CodecPerformance.java | 2 -
.../hadoop/hbase/spark/BulkLoadPartitioner.scala | 2 -
.../hadoop/hbase/spark/ByteArrayComparable.scala | 2 -
.../hadoop/hbase/spark/ByteArrayWrapper.scala | 2 -
.../spark/ColumnFamilyQualifierMapKeyWrapper.scala | 2 -
.../hbase/spark/FamiliesQualifiersValues.scala | 2 -
.../hbase/spark/FamilyHFileWriteOptions.scala | 2 -
.../apache/hadoop/hbase/spark/HBaseContext.scala | 2 -
.../hadoop/hbase/spark/HBaseDStreamFunctions.scala | 2 -
.../hadoop/hbase/spark/HBaseRDDFunctions.scala | 2 -
.../hadoop/hbase/spark/JavaHBaseContext.scala | 2 -
.../hadoop/hbase/spark/KeyFamilyQualifier.scala | 2 -
.../apache/hadoop/hbase/spark/NewHBaseRDD.scala | 2 -
.../hbase/spark/datasources/HBaseSparkConf.scala | 2 -
.../sql/datasources/hbase/HBaseTableCatalog.scala | 2 -
397 files changed, 137 insertions(+), 877 deletions(-)
diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java
index 506ef56..8a34a64 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java
@@ -43,7 +43,6 @@ import java.lang.annotation.RetentionPolicy;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class InterfaceAudience {
/**
* Intended for use by any project or application.
diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java
index ac20f3a..fbe71d1 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java
@@ -39,7 +39,6 @@ import java.lang.annotation.RetentionPolicy;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class InterfaceStability {
/**
* Can evolve while retaining compatibility for minor release boundaries.;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java
index ed14153..c2d3a7b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Returned to the clients when their request was discarded due to server being overloaded.
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CallDroppedException extends IOException {
public CallDroppedException() {
super();
@@ -40,4 +38,4 @@ public class CallDroppedException extends IOException {
public CallDroppedException(String message) {
super(message);
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java
index 9f8b386..d615d0e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java
@@ -21,11 +21,9 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CallQueueTooBigException extends IOException {
public CallQueueTooBigException() {
super();
@@ -36,4 +34,4 @@ public class CallQueueTooBigException extends IOException {
public CallQueueTooBigException(String message) {
super(message);
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java
index b27ae82..9e3f556 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* This exception is thrown by the master when a region server clock skew is
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ClockOutOfSyncException extends IOException {
public ClockOutOfSyncException(String message) {
super(message);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index aed3af4..c51a437 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -26,7 +26,6 @@ import java.util.Set;
import java.util.Map;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.master.RegionState;
import org.apache.hadoop.io.VersionedWritable;
@@ -50,7 +49,6 @@ import org.apache.hadoop.io.VersionedWritable;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ClusterStatus extends VersionedWritable {
/**
* Version for object serialization. Incremented for changes in serialized
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java
index 8be2518..225ec4e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Subclass if exception is not meant to be retried: e.g.
* {@link org.apache.hadoop.hbase.UnknownScannerException}
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class DoNotRetryIOException extends HBaseIOException {
// TODO: This would be more useful as a marker interface than as a class.
private static final long serialVersionUID = 1197446454511704139L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java
index 1000d7d..2dbc93e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* persisted into store files. Response should include replay of wal content.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class DroppedSnapshotException extends IOException {
private static final long serialVersionUID = -5463156580831677374L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 545ea61..46e97c3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -27,7 +27,6 @@ import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.MobCompactPartitionPolicy;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.HBaseException;
@@ -49,7 +48,6 @@ import com.google.common.base.Preconditions;
* It is used as input when creating a table or adding a column.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HColumnDescriptor implements Comparable {
// For future backward compatibility
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
index b98d210..bc93cc6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
@@ -28,7 +28,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -76,7 +75,6 @@ import org.apache.hadoop.util.StringUtils;
* previous behavior of a range corresponding to 1 region.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HRegionInfo implements Comparable {
private static final Log LOG = LogFactory.getLog(HRegionInfo.class);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
index edb53dc..6cf22dd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Addressing;
/**
@@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.util.Addressing;
* This interface has been marked InterfaceAudience.Public in 0.96 and 0.98.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HRegionLocation implements Comparable {
private final HRegionInfo regionInfo;
private final ServerName serverName;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index 25fd896..ed0659c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -36,7 +36,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* when the region split should occur, coprocessors associated with it etc...
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HTableDescriptor implements Comparable {
private static final Log LOG = LogFactory.getLog(HTableDescriptor.class);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
index 5d9c2ed..8fb05d5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if a request is table schema modification is requested but
* made for an invalid family name.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class InvalidFamilyOperationException extends DoNotRetryIOException {
private static final long serialVersionUID = 1L << 22 - 1L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
index d2d92b3..28ed13e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Ways to keep cells marked for delete around.
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* this way for backwards compatibility.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum KeepDeletedCells {
/** Deleted Cells are not retained. */
FALSE,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
index ddd03e8..70afee2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
@@ -21,13 +21,11 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if the master is not running
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class MasterNotRunningException extends IOException {
private static final long serialVersionUID = 1L << 23 - 1L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java
index 0153f7d..8dc92ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Enum describing all possible memory compaction policies
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum MemoryCompactionPolicy {
/**
* No memory compaction, when size threshold is exceeded data is flushed to disk
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java
index fdff554..90cd2c3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Exception thrown when the result needs to be chunked on the server side.
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* retries because some of the multi was a success.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class MultiActionResultTooLarge extends RetryImmediatelyException {
public MultiActionResultTooLarge(String s) {
super(s);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java
index a7ebf0d..10a1ed6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a namespace exists but should not
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class NamespaceExistException extends DoNotRetryIOException {
private static final long serialVersionUID = -1582357514338825412L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java
index 092290d..f61cdea 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a namespace can not be located
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class NamespaceNotFoundException extends DoNotRetryIOException {
private static final long serialVersionUID = -6673607347330260324L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
index 3cb0f5b..ce6acf7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
@@ -20,13 +20,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when an operation requires the root and all meta regions to be online
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class NotAllMetaRegionsOnlineException extends DoNotRetryIOException {
private static final long serialVersionUID = 6439786157874827523L;
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
index 8975c74..bca9cd2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* serving.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class NotServingRegionException extends IOException {
private static final long serialVersionUID = 1L << 17 - 1L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java
index a5ae44b..dc6fd4b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* This exception is thrown by the master when a region server was shut down and
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class PleaseHoldException extends HBaseIOException {
public PleaseHoldException(String message) {
super(message);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java
index 24ea16c..e0c0346 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when something happens related to region handling.
* Subclasses have to be more specific.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RegionException extends HBaseIOException {
private static final long serialVersionUID = 1473510258071111371L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java
index befb2de..d6c028d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java
@@ -23,7 +23,6 @@ package org.apache.hadoop.hbase;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hadoop.hbase.util.Bytes;
@@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.util.Strings;
* Encapsulates per-region load metrics.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionLoad {
protected ClusterStatusProtos.RegionLoad regionLoadPB;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
index fd5fc26..5d4cad4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown by a region server if it will block and wait to serve a request.
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* region is compacting.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionTooBusyException extends IOException {
private static final long serialVersionUID = 1728345723728342L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java
index daf7dd5..8ad93f2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a replication peer can not be found
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ReplicationPeerNotFoundException extends DoNotRetryIOException {
private static final long serialVersionUID = 1L;
@@ -33,4 +31,4 @@ public class ReplicationPeerNotFoundException extends DoNotRetryIOException {
public ReplicationPeerNotFoundException(String peerId) {
super(peerId);
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java
index e0b90fd..b97c168 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java
@@ -20,10 +20,8 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RetryImmediatelyException extends IOException {
public RetryImmediatelyException(String s) {
super(s);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
index e884e51..8547dfb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java
@@ -27,7 +27,6 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor;
@@ -40,7 +39,6 @@ import org.apache.hadoop.hbase.util.Strings;
* This class is used for exporting current state of load on a RegionServer.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ServerLoad {
private int stores = 0;
private int storefiles = 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
index 623e8cf..6c0f3bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a table exists but should not
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class TableExistsException extends DoNotRetryIOException {
private static final long serialVersionUID = 1L << 7 - 1L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java
index fa1f970..391fd43 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
*
@@ -26,7 +25,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
@SuppressWarnings("serial")
public class TableInfoMissingException extends HBaseIOException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
index 9b5f728..dcfa857 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
* Thrown if a table should be offline but is not
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class TableNotDisabledException extends DoNotRetryIOException {
private static final long serialVersionUID = 1L << 19 - 1L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java
index 0f78ee6..5de406e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* Thrown if a table should be enabled but is not
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class TableNotEnabledException extends DoNotRetryIOException {
private static final long serialVersionUID = 262144L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java
index 8ac5e20..6344cd3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java
@@ -19,12 +19,10 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/** Thrown when a table can not be located */
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class TableNotFoundException extends DoNotRetryIOException {
private static final long serialVersionUID = 993179627856392526L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java
index 2ebba32..e9684ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
/**
* Thrown when we are asked to operate on a region we know nothing about.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class UnknownRegionException extends DoNotRetryRegionException {
private static final long serialVersionUID = 1968858760475205392L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
index 3e7b22d..8f7d441 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* down and has cancelled all leases.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class UnknownScannerException extends DoNotRetryIOException {
private static final long serialVersionUID = 993179627856392526L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
index 422a659..c492a27 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
@@ -21,13 +21,11 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if the client can't connect to zookeeper
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ZooKeeperConnectionException extends IOException {
private static final long serialVersionUID = 1L << 23 - 1L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index cc14acd..f2fc9a5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.replication.TableCFs;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
@@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.util.Pair;
* @since 0.99.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface Admin extends Abortable, Closeable {
int getOperationTimeout();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 15497ce..a655c7d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* {@link #add(byte[], byte[], byte[])} method.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Append extends Mutation {
/**
* @param returnResults
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
index ef7a4f2..ab791c2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java
@@ -27,14 +27,12 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Pair;
/**
* The asynchronous administrative API for HBase.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface AsyncAdmin {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java
index dbe32ca..65005fa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java
@@ -23,13 +23,11 @@ import java.util.concurrent.ExecutorService;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* The asynchronous version of Connection.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface AsyncConnection extends Closeable {
/**
@@ -105,4 +103,4 @@ public interface AsyncConnection extends Closeable {
* @return an AsyncAdmin instance for cluster administration
*/
AsyncAdmin getAdmin();
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
index 402ad64..a2e193c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* The asynchronous table for normal users.
@@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* to provide a {@code ExecutorService}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface AsyncTable extends AsyncTableBase {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java
index b5a251b..73ebebb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java
@@ -30,7 +30,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* from the returned {@link CompletableFuture}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface AsyncTableBase {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java
index 2330855..1cc5f10 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java
@@ -22,7 +22,6 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* For creating {@link AsyncTable} or {@link RawAsyncTable}.
@@ -32,7 +31,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* AsyncTable/RawAsyncTable instance.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface AsyncTableBuilder<T extends AsyncTableBase> {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java
index 989e8d9..13434a6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java
@@ -22,7 +22,6 @@ import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* The asynchronous version of RegionLocator.
@@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* from the returned {@link CompletableFuture}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface AsyncTableRegionLocator {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java
index 78d3398..fd36e76 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java
@@ -22,10 +22,8 @@ package org.apache.hadoop.hbase.client;
import java.util.Map;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Stable
public interface Attributes {
/**
* Sets an attribute.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java
index cea9304..766c28c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import java.io.Closeable;
import java.io.IOException;
@@ -61,7 +60,6 @@ import java.util.List;
* @since 1.0.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface BufferedMutator extends Closeable {
/**
* Key to use setting non-default BufferedMutator implementation in Configuration.
@@ -138,7 +136,6 @@ public interface BufferedMutator extends Closeable {
* Listens for asynchronous exceptions on a {@link BufferedMutator}.
*/
@InterfaceAudience.Public
- @InterfaceStability.Evolving
interface ExceptionListener {
public void onException(RetriesExhaustedWithDetailsException exception,
BufferedMutator mutator) throws RetriesExhaustedWithDetailsException;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
index 9c901e2..060fc77 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java
@@ -23,13 +23,11 @@ import java.util.concurrent.ExecutorService;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Parameters for instantiating a {@link BufferedMutator}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class BufferedMutatorParams implements Cloneable {
static final int UNSET = -1;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java
index 17fec2b..9432378 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Currently, there are only two compact types:
@@ -25,11 +24,10 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* {@code MOB} means do mob files compaction.
* */
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public enum CompactType {
NORMAL (0),
MOB (1);
CompactType(int value) {}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java
index b4824ef..8f2a83b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java
@@ -17,13 +17,11 @@
*/
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* POJO representing the compaction state
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum CompactionState {
NONE, MINOR, MAJOR, MAJOR_AND_MINOR;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java
index a8cd296..8eedb79 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* A cluster connection encapsulating lower level individual connections to actual servers and
@@ -50,7 +49,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* @since 0.99.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface Connection extends Abortable, Closeable {
/*
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java
index 64f337a..156a3c8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java
@@ -26,7 +26,6 @@ import java.util.concurrent.ExecutorService;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -54,7 +53,6 @@ import org.apache.hadoop.hbase.util.ReflectionUtils;
* @since 0.99.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ConnectionFactory {
public static final String HBASE_CLIENT_ASYNC_CONNECTION_IMPL = "hbase.client.async.connection.impl";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java
index 39323d6..b6e0531 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Consistency defines the expected consistency level for an operation.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum Consistency {
// developer note: Do not reorder. Client.proto#Consistency depends on this order
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index 0eb1d2b..278ea58 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;
@@ -69,7 +68,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* timestamp. The constructor timestamp is not referenced.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Delete extends Mutation implements Comparable<Row> {
/**
* Create a Delete operation for the specified row.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
index 3c5dd28..e0b5ead 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Similar to RegionException, but disables retries.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class DoNotRetryRegionException extends DoNotRetryIOException {
private static final long serialVersionUID = 6907047686199321701L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java
index 9b35e04..a3fa1d2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Enum describing the durability guarantees for tables and {@link Mutation}s
* Note that the items must be sorted in order of increasing durability
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum Durability {
/* Developer note: Do not rename the enum field names. They are serialized in HTableDescriptor */
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index 3771aff..c3ddc4b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -34,7 +34,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.security.access.Permission;
@@ -65,7 +64,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* To add a filter, call {@link #setFilter(Filter) setFilter}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Get extends Query
implements Row, Comparable<Row> {
private static final Log LOG = LogFactory.getLog(Get.class);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index f3a58ad..313125c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -67,7 +66,6 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
* This class is thread safe.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HTableMultiplexer {
private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName());
@@ -271,7 +269,6 @@ public class HTableMultiplexer {
* in total or on per region server basis.
*/
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public static class HTableMultiplexerStatus {
private long totalFailedPutCounter;
private long totalBufferedPutCounter;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
index eb1cbc5..179a566 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
@@ -49,7 +48,6 @@ import org.apache.hadoop.hbase.util.ClassSize;
* {@link #addColumn(byte[], byte[], long)} method.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Increment extends Mutation implements Comparable<Row> {
private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE;
private TimeRange tr = new TimeRange();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
index 01aba6f..ad0897e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
@@ -21,7 +21,6 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Specify Isolation levels in Scan operations.
@@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* not have been committed yet.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public enum IsolationLevel {
READ_COMMITTED(1),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java
index 7e31b25..5fa9ec2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java
@@ -17,13 +17,11 @@
*/
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Represents the master switch type
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum MasterSwitchType {
SPLIT,
MERGE
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java
index f550572..076ab6f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Enum describing the mob compact partition policy types.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum MobCompactPartitionPolicy {
/**
* Compact daily mob files into one file
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index fb55fdd..b010c2f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -58,7 +57,6 @@ import com.google.common.io.ByteArrayDataOutput;
import com.google.common.io.ByteStreams;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable,
HeapSize {
public static final long MUTATION_OVERHEAD = ClassSize.align(
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
index 126b117..e628911 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when no region server can be found for a region
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class NoServerForRegionException extends DoNotRetryRegionException {
private static final long serialVersionUID = 1L << 11 - 1L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java
index 4f25e2c..130cf16 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java
@@ -22,7 +22,6 @@ import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.JsonMapper;
/**
@@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.JsonMapper;
* Contains methods for exposure to logging and debugging tools.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public abstract class Operation {
// TODO make this configurable
// TODO Do we need this anymore now we have protobuffed it all?
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
index cc863b9..ba21cbb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
@@ -24,12 +24,10 @@ import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public abstract class OperationWithAttributes extends Operation implements Attributes {
// An opaque blob of attributes
private Map<String, byte[]> attributes;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
index 701dceb..5c3ac4b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.IndividualBytesFieldCell;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.security.access.Permission;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
@@ -51,7 +50,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* setting the timestamp.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Put extends Mutation implements HeapSize, Comparable<Row> {
/**
* Create a Put operation for the specified row.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
index 1322ef5..7f50d13 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
@@ -22,7 +22,6 @@ import java.util.Map;
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
@@ -38,7 +37,6 @@ import com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.util.Bytes;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public abstract class Query extends OperationWithAttributes {
private static final String ISOLATION_LEVEL = "_isolationlevel_";
protected Filter filter = null;
@@ -275,4 +273,4 @@ public abstract class Query extends OperationWithAttributes {
public Map<byte[], TimeRange> getColumnFamilyTimeRange() {
return this.colFamTimeRangeMap;
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java
index e493123..4a916d3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java
@@ -26,7 +26,6 @@ import java.util.function.Function;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* A low level asynchronous table.
@@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* most features of AsyncTable, we can think about merge these two interfaces.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface RawAsyncTable extends AsyncTableBase {
/**
@@ -93,7 +91,6 @@ public interface RawAsyncTable extends AsyncTableBase {
*
*/
@InterfaceAudience.Public
- @InterfaceStability.Unstable
@FunctionalInterface
interface CoprocessorCallable<S, R> {
@@ -175,7 +172,6 @@ public interface RawAsyncTable extends AsyncTableBase {
*
*/
@InterfaceAudience.Public
- @InterfaceStability.Unstable
interface CoprocessorCallback<R> {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java
index 899c0bb..820960b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
@@ -32,14 +31,12 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
* consuming tasks in all methods below unless you know what you are doing.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface RawScanResultConsumer {
/**
* Used to resume a scan.
*/
@InterfaceAudience.Public
- @InterfaceStability.Unstable
interface ScanResumer {
/**
@@ -60,7 +57,6 @@ public interface RawScanResultConsumer {
* thrown if you have already called one of the methods.
*/
@InterfaceAudience.Public
- @InterfaceStability.Unstable
interface ScanController {
/**
@@ -122,4 +118,4 @@ public interface RawScanResultConsumer {
*/
default void onScanMetricsCreated(ScanMetrics scanMetrics) {
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java
index bfdb216..5f9cd6d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* POJO representing region server load
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionLoadStats {
int memstoreLoad;
int heapOccupancy;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java
index 39518a6..2c96a4a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java
@@ -25,7 +25,6 @@ import java.util.List;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Pair;
/**
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Pair;
* @since 0.99.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface RegionLocator extends Closeable {
/**
* Finds the region on which the given row is being served. Does not reload the cache.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java
index d6cceb9..018bc69 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java
@@ -20,11 +20,9 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.RegionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/** Thrown when a table can not be located */
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionOfflineException extends RegionException {
private static final long serialVersionUID = 466008402L;
/** default constructor */
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java
index 46e730e..33fed2c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java
@@ -25,17 +25,14 @@ import java.util.function.Consumer;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* An interface for client request scheduling algorithm.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface RequestController {
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public enum ReturnCode {
/**
* Accept current row.
@@ -55,7 +52,6 @@ public interface RequestController {
* Picks up the valid data.
*/
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public interface Checker {
/**
* Checks the data whether it is valid to submit.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java
index 7ed80f0..ba4babd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java
@@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.ReflectionUtils;
/**
* A factory class that constructs an {@link org.apache.hadoop.hbase.client.RequestController}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class RequestControllerFactory {
public static final String REQUEST_CONTROLLER_IMPL_CONF_KEY = "hbase.client.request.controller.impl";
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index f8682ec..63aab80 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -79,7 +78,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* in then use {@link #copyFrom(Result)}
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Result implements CellScannable, CellScanner {
private Cell[] cells;
private Boolean exists; // if the query was just to check existence.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
index 8951e84..ef8d887 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
@@ -26,14 +26,12 @@ import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
/**
* Interface for client-side scanning. Go to {@link Table} to obtain instances.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public interface ResultScanner extends Closeable, Iterable<Result> {
@Override
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
index dc25f64..eec9f62 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
@@ -23,14 +23,12 @@ import java.util.Date;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Exception thrown by HTable methods when an attempt to do something (like
* commit changes) fails after a bunch of retries.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RetriesExhaustedException extends IOException {
private static final long serialVersionUID = 1876775844L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index 8b09222..70d5548 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -30,7 +30,6 @@ import java.util.Set;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RetriesExhaustedWithDetailsException
extends RetriesExhaustedException {
List<Throwable> exceptions;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java
index cea45fc..79f27bc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Has a row.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public interface Row extends Comparable<Row> {
/**
* @return The row.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java
index 85fd590..758bce6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.client;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Provide a way to access the inner buffer.
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* @param <T>
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface RowAccess<T> extends Iterable<T> {
/**
* @return true if there are no elements.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
index 8a8193e..a9384ac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
@@ -24,7 +24,6 @@ import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* into Sets or using them as keys in Maps.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RowMutations implements Row {
private final List<Mutation> mutations;
private final byte [] row;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
index 69b57b0..e32127c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Gets or Scans throw this exception if running without in-row scan flag
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* hbase.table.max.rowsize).
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RowTooBigException extends DoNotRetryRegionException {
public RowTooBigException(String message) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
index 2b2e4c8..67d1b4d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
@@ -18,12 +18,10 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import java.io.IOException;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface RpcRetryingCaller<T> {
void cancel();
@@ -49,4 +47,4 @@ public interface RpcRetryingCaller {
*/
T callWithoutRetries(RetryingCallable<T> callable, int callTimeout)
throws IOException, RuntimeException;
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 0047d2f..7bc78d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -33,7 +33,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
@@ -87,7 +86,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* instance per usage.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class Scan extends Query {
private static final Log LOG = LogFactory.getLog(Scan.class);
@@ -1131,7 +1129,6 @@ public class Scan extends Query {
}
@InterfaceAudience.Public
- @InterfaceStability.Unstable
public enum ReadType {
DEFAULT, STREAM, PREAD
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java
index 03b1ba0..5a4170f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java
@@ -18,14 +18,12 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
/**
* Receives {@link Result} for an asynchronous scan.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface ScanResultConsumer {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
index d70c76f..72b2a15 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Updat
* networking, etc..) when talking to a local master
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ShortCircuitMasterConnection implements MasterKeepAliveConnection {
private final MasterService.BlockingInterface stub;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
index b9b6b6c..9a1e1cb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
@@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* The POJO equivalent of HBaseProtos.SnapshotDescription
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class SnapshotDescription {
private final String name;
private final TableName table;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java
index e3e12bd..c9820ca 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java
@@ -17,13 +17,11 @@
*/
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* POJO representing the snapshot type
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum SnapshotType {
DISABLED, FLUSH, SKIPFLUSH;
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java
index fa4e5f1..b74823c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java
@@ -28,7 +28,6 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
@@ -39,7 +38,6 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
* {@link org.apache.hadoop.hbase.client.Table#coprocessorService(byte[])}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel {
private static final Log LOG = LogFactory.getLog(SyncCoprocessorRpcChannel.class);
@@ -77,4 +75,4 @@ abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel {
protected abstract Message callExecService(RpcController controller,
Descriptors.MethodDescriptor method, Message request, Message responsePrototype)
throws IOException;
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 90fee8d..933329f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
@@ -49,7 +48,6 @@ import com.google.protobuf.ServiceException;
* @since 0.99.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface Table extends Closeable {
/**
* Gets the fully qualified table name instance of this table.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java
index 27e1596..3eedb10 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* For creating {@link Table} instance.
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* Table instance.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface TableBuilder {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
index 33aef79..742acee 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
@@ -21,10 +21,8 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
class UnmodifyableHRegionInfo extends HRegionInfo {
/*
* Creates an unmodifyable copy of an HRegionInfo
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
index 59a1bd5..b5f5ae9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
@@ -22,13 +22,11 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Read-only table descriptor.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class UnmodifyableHTableDescriptor extends HTableDescriptor {
/**
* Default constructor.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
index e0609da..69729f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java
@@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.client;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class WrongRowIOException extends HBaseIOException {
private static final long serialVersionUID = -5849522209440123059L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java
index 94e434f..2c7b139 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.client.backoff;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Configurable policy for the amount of time a client should wait for a new request to the
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public interface ClientBackoffPolicy {
public static final String BACKOFF_POLICY_CLASS =
@@ -39,4 +37,4 @@ public interface ClientBackoffPolicy {
* @return the number of ms to wait on the client based on the
*/
public long getBackoffTime(ServerName serverName, byte[] region, ServerStatistics stats);
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
index b41133a..a39bd96 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
@@ -23,7 +23,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import com.google.common.base.Preconditions;
@@ -32,7 +31,6 @@ import com.google.common.base.Preconditions;
* max backoff to generate the backoff time.
*/
@InterfaceAudience.Public
-@InterfaceStability.Unstable
public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy {
private static final Log LOG = LogFactory.getLog(ExponentialClientBackoffPolicy.class);
@@ -104,4 +102,4 @@ public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy {
"Value %s must be within the range [%s,%s]", valueIn, baseMin, baseMax);
return ((limitMax - limitMin) * (valueIn - baseMin) / (baseMax - baseMin)) + limitMin;
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
index a9c23cc..0b783d7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
@@ -22,14 +22,12 @@ package org.apache.hadoop.hbase.client.coprocessor;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* A collection of interfaces and utilities used for interacting with custom RPC
* interfaces exposed by Coprocessors.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public abstract class Batch {
/**
* Defines a unit of work to be executed.
@@ -50,7 +48,6 @@ public abstract class Batch {
* @param <R> the return type from {@link Batch.Call#call(Object)}
*/
@InterfaceAudience.Public
- @InterfaceStability.Stable
public interface Call<T, R> {
R call(T instance) throws IOException;
}
@@ -72,7 +69,6 @@ public abstract class Batch {
* org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
*/
@InterfaceAudience.Public
- @InterfaceStability.Stable
  public interface Callback<R> {
void update(byte[] region, byte[] row, R result);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
index 73b3892..7813527 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client.metrics;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
@@ -37,7 +36,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* for now.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ScanMetrics extends ServerSideScanMetrics {
// AtomicLongs to hold the metrics values. These are all updated through ClientScanner and
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java
index b14938b..8a96aeb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java
@@ -22,7 +22,6 @@ import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import com.google.common.collect.ImmutableMap;
@@ -30,7 +29,6 @@ import com.google.common.collect.ImmutableMap;
* Provides server side metrics related to scan operations.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ServerSideScanMetrics {
/**
* Hash to hold the String -> Atomic Long mappings for each metric
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
index 0eae10b..94425f9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
@@ -86,7 +85,6 @@ import com.google.common.collect.Lists;
* @deprecated use {@link org.apache.hadoop.hbase.client.Admin} instead.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
@Deprecated
public class ReplicationAdmin implements Closeable {
private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java
index f293586..854517e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java
@@ -23,14 +23,12 @@ import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Used by {@link org.apache.hadoop.hbase.client.Admin#listReplicatedTableCFs()}.
 * The cfs is a map of &lt;ColumnFamily, ReplicationScope&gt;.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class TableCFs {
private final TableName table;
  private final Map<String, Integer> cfs;
@@ -58,4 +56,4 @@ public class TableCFs {
}
return sb.toString();
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
index 1847b2e..ab70616 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.client.security;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Available security capabilities
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum SecurityCapability {
// Note to implementors: These must match the numbering of Capability values in MasterProtos
SIMPLE_AUTHENTICATION(0),
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java
index 3b01a9e..8c0d054 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.coprocessor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if a coprocessor rules we should bypass an operation
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class BypassCoprocessorException extends CoprocessorException {
private static final long serialVersionUID = 5943889011582357043L;
@@ -41,4 +39,4 @@ public class BypassCoprocessorException extends CoprocessorException {
public BypassCoprocessorException(String s) {
super(s);
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
index 9946d97..541392c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.coprocessor;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if a coprocessor encounters any exception.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CoprocessorException extends DoNotRetryIOException {
private static final long serialVersionUID = 4357922136679804887L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
index 43a4ee4..74621ab 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java
@@ -39,7 +39,6 @@ package org.apache.hadoop.hbase.exceptions;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when the client believes that we are trying to communicate to has
@@ -49,7 +48,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* retries and fast fail the operation.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ConnectionClosingException extends IOException {
public ConnectionClosingException(String string) {
super(string);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
index 5bfd2f3..050b2c1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Exception thrown if a mutation fails sanity checks.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class FailedSanityCheckException extends org.apache.hadoop.hbase.DoNotRetryIOException {
private static final long serialVersionUID = 1788783640409186240L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
index b87e400..2291053 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.DoNotRetryRegionException;
* Thrown when something is wrong in trying to merge two regions.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class MergeRegionException extends DoNotRetryRegionException {
private static final long serialVersionUID = 4970899110066124122L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
index b31e055..63f8929 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java
@@ -23,7 +23,6 @@ import java.net.ConnectException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when the client believes that we are trying to communicate to has
@@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* retries and fast fail the operation.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class PreemptiveFastFailException extends ConnectException {
private static final long serialVersionUID = 7129103682617007177L;
private long failureCount, timeOfFirstFailureMilliSec, timeOfLatestAttemptMilliSec;
@@ -107,4 +105,4 @@ public class PreemptiveFastFailException extends ConnectException {
public boolean isGuaranteedClientSideOnly() {
return guaranteedClientSideOnly;
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java
index 06db472..78ea099 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a read request issued against a region which is in recovering state.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionInRecoveryException extends NotServingRegionException {
private static final long serialVersionUID = 327302071153799L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java
index 0021f4a..c71bc6d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when the size of the rpc request received by the server is too large.
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* @since 1.3.0
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RequestTooBigException extends DoNotRetryIOException {
private static final long serialVersionUID = -1593339239809586516L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java
index 7689eb1..0704189 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java
@@ -20,14 +20,12 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when the server side has received an Exception, and asks the Client to reset the scanner
* state by closing the current region scanner, and reopening from the start of last seen row.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ScannerResetException extends DoNotRetryIOException {
private static final long serialVersionUID = -5649728171144849619L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
index 933e888..9fbc67d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java
@@ -20,14 +20,12 @@
package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* An error requesting an RPC protocol that the server is not serving.
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class UnknownProtocolException extends org.apache.hadoop.hbase.DoNotRetryIOException {
  private Class<?> protocol;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
index b59398b..87b622c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
@@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
@@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class BinaryComparator extends org.apache.hadoop.hbase.filter.ByteArrayComparable {
/**
* Constructor
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
index 01cb769..2c951f6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
@@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
@@ -37,7 +36,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* {@link BinaryComparator}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class BinaryPrefixComparator extends ByteArrayComparable {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
index dac8864..96ef2e1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
@@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
@@ -34,12 +33,10 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* with the specified byte array. Then returns whether the result is non-zero.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class BitComparator extends ByteArrayComparable {
/** Bit operators. */
@InterfaceAudience.Public
- @InterfaceStability.Stable
public enum BitwiseOp {
/** and */
AND,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 3ae20a1..dad4132 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* makes this filter unsuitable as a Scan filter.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ColumnCountGetFilter extends FilterBase {
private int limit = 0;
private int count = 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 7d4571e..696f868 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
* for pagination.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ColumnPaginationFilter extends FilterBase {
private int limit = 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index 7230d3a..b6e9607 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -42,7 +41,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
* columns like 'and', 'anti' but not keys with columns like 'ball', 'act'.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ColumnPrefixFilter extends FilterBase {
protected byte [] prefix = null;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 99f9926..69b5088 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -50,7 +49,6 @@ import com.google.common.base.Preconditions;
* or not.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ColumnRangeFilter extends FilterBase {
protected byte[] minColumn = null;
protected boolean minColumnInclusive = true;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index bbc31ec..451d7ee 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -25,7 +25,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -49,12 +48,10 @@ import com.google.common.base.Preconditions;
* Multiple filters can be combined using {@link FilterList}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public abstract class CompareFilter extends FilterBase {
/** Comparison operators. */
@InterfaceAudience.Public
- @InterfaceStability.Stable
public enum CompareOp {
/** less than */
LESS,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index d82eaec..c14314c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -28,7 +28,6 @@ import java.util.Set;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
* full rows for correct filtering
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class DependentColumnFilter extends CompareFilter {
protected byte[] columnFamily;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index 8dfd2ca..a28855d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* directly rather than a filter.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class FamilyFilter extends CompareFilter {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 59aa855..de5d6c6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -24,7 +24,6 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
/**
@@ -53,7 +52,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
* @see FilterBase
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public abstract class Filter {
protected transient boolean reversed;
/**
@@ -156,7 +154,6 @@ public abstract class Filter {
* Return codes for filterValue().
*/
@InterfaceAudience.Public
- @InterfaceStability.Stable
public enum ReturnCode {
/**
* Include the Cell
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 04eba0c..d533026 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -52,11 +51,9 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* Defaults to {@link Operator#MUST_PASS_ALL}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
final public class FilterList extends FilterBase {
/** set operator */
@InterfaceAudience.Public
- @InterfaceStability.Stable
public static enum Operator {
/** !AND */
MUST_PASS_ALL,
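
As a usage reminder for the class touched above, a short sketch of combining filters with FilterList and the MUST_PASS_ALL operator (logical AND); the row-key prefix and the choice of member filters are made up for illustration:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListExample {
  public static Scan buildScan() {
    // Both filters must accept a cell for it to be returned (logical AND).
    FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
    filters.addFilter(new PrefixFilter(Bytes.toBytes("user-"))); // rows whose key starts with "user-"
    filters.addFilter(new KeyOnlyFilter());                      // return keys only, drop values
    Scan scan = new Scan();
    scan.setFilter(filters);
    return scan;
  }
}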
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index 14d23d4..8493610 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -23,7 +23,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* This filter can be used to more efficiently perform row count operations.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class FirstKeyOnlyFilter extends FilterBase {
private boolean foundKV = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index 4681fd3..ac5f125 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -24,7 +24,6 @@ import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -46,7 +45,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
* @deprecated Deprecated in 2.0. See HBASE-13347
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
@Deprecated
public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 65c2a61..895ffc8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -59,7 +58,6 @@ import com.google.common.annotations.VisibleForTesting;
* mask is "????_99_????_01", where at ? can be any value.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FuzzyRowFilter extends FilterBase {
private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
  private List<Pair<byte[], byte[]>> fuzzyKeysData;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 7aa807c..ed95a7d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -40,7 +39,6 @@ import com.google.common.base.Preconditions;
* Use this filter to include the stop row, eg: [A,Z].
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class InclusiveStopFilter extends FilterBase {
private byte [] stopRowKey;
private boolean done = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java
index 8eba03c..6410ab4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.filter;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Used to indicate a filter incompatibility
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class IncompatibleFilterException extends RuntimeException {
private static final long serialVersionUID = 3236763276623198231L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java
index 81aae0b..0406058 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase.filter;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Used to indicate an invalid RowFilter.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class InvalidRowFilterException extends RuntimeException {
private static final long serialVersionUID = 2667894046345657865L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index adbf304..b082941 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -43,7 +42,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* the values.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class KeyOnlyFilter extends FilterBase {
boolean lenAsVal;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
index 8bcc7b2..429b498 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
@@ -23,7 +23,6 @@ import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
@@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* A long comparator which numerical compares against the specified byte array
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class LongComparator extends ByteArrayComparable {
private long longValue;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 77fbaf4..d398349 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -25,7 +25,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -48,7 +47,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* specified list and perform fast-forwarding during scan. Thus, the scan will be quite efficient.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class MultiRowRangeFilter extends FilterBase {
  private List<RowRange> rangeList;
@@ -413,7 +411,6 @@ public class MultiRowRangeFilter extends FilterBase {
}
@InterfaceAudience.Public
- @InterfaceStability.Evolving
  public static class RowRange implements Comparable<RowRange> {
private byte[] startRow;
private boolean startRowInclusive = true;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 12d9ac7..6bcb561 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -25,7 +25,6 @@ import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* columns like 'and', 'anti' but not keys with columns like 'ball', 'act'.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class MultipleColumnPrefixFilter extends FilterBase {
protected byte [] hint = null;
  protected TreeSet<byte []> sortedPrefixes = createTreeSet();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index 0d60e2e..a72afca 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
@@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class NullComparator extends ByteArrayComparable {
public NullComparator() {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index 2b91b7a..894e7b4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -23,7 +23,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* locally.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class PageFilter extends FilterBase {
private long pageSize = Long.MAX_VALUE;
private int rowsAccepted = 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java
index 3a20772..785f3f3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java
@@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* ParseConstants holds a bunch of constants related to parsing Filter Strings
* Used by {@link ParseFilter}
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public final class ParseConstants {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 0823785..21cdd9c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -33,7 +33,6 @@ import java.util.Stack;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
@@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* Filter Language can be found at: https://issues.apache.org/jira/browse/HBASE-4176
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ParseFilter {
private static final Log LOG = LogFactory.getLog(ParseFilter.class);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index e3cefe5..33b3ead 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.ByteBufferCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
* Pass results that have same row prefix.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class PrefixFilter extends FilterBase {
protected byte [] prefix = null;
protected boolean passedPrefix = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index cc240f8..72a50fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* directly rather than a filter.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class QualifierFilter extends CompareFilter {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index 177ed4d..48413ac 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -23,7 +23,6 @@ import java.util.Random;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RandomRowFilter extends FilterBase {
protected static final Random random = new Random();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
index 3f05901..2f5a342 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
@@ -26,7 +26,6 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* @see java.util.regex.Pattern
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RegexStringComparator extends ByteArrayComparable {
private static final Log LOG = LogFactory.getLog(RegexStringComparator.class);
@@ -80,7 +78,6 @@ public class RegexStringComparator extends ByteArrayComparable {
/** Engine implementation type (default=JAVA) */
@InterfaceAudience.Public
- @InterfaceStability.Stable
public enum EngineType {
JAVA,
JONI
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index 4f91f8b..3f6136f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* and stop rows directly rather than a filter.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class RowFilter extends CompareFilter {
private boolean filterOutRow = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index 192fd97..6b155b0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -27,7 +27,6 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* needed as input (besides for the filtering itself).
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 3bb80cb..0dbc0bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException;
@@ -68,7 +67,6 @@ import com.google.common.base.Preconditions;
* To filter based on the value of all scanned columns, use {@link ValueFilter}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class SingleColumnValueFilter extends FilterBase {
protected byte [] columnFamily;
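
For context on the class above, a minimal sketch of the common SingleColumnValueFilter usage pattern; the family, qualifier and value bytes are placeholders chosen for illustration:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleColumnValueFilterExample {
  public static Scan buildScan() {
    // Keep only rows whose "info:status" column equals "active".
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("info"), Bytes.toBytes("status"),
        CompareOp.EQUAL, Bytes.toBytes("active"));
    // Also skip rows that do not carry the column at all.
    filter.setFilterIfMissing(true);
    Scan scan = new Scan();
    scan.setFilter(filter);
    return scan;
  }
}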
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index 5461011..1cdf206 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -51,7 +50,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class SkipFilter extends FilterBase {
private boolean filterRow = false;
private Filter filter;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
index d30d057..157d97c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.filter;
import java.util.Locale;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class SubstringComparator extends ByteArrayComparable {
private String substr;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index 8c58f91..b1409e3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -25,7 +25,6 @@ import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -42,7 +41,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* or {@link org.apache.hadoop.hbase.client.Scan#setTimeStamp(long)}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class TimestampsFilter extends FilterBase {
private final boolean canHint;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index c0dacaf..4edb57b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -24,7 +24,6 @@ import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* use {@link SingleColumnValueFilter}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ValueFilter extends CompareFilter {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index 8738962..6de3676 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE
* returns true.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class WhileMatchFilter extends FilterBase {
private boolean filterAllRemaining = false;
private Filter filter;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java
index 7eb96d5..010dbb9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java
@@ -18,10 +18,8 @@
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class BadAuthException extends FatalConnectionException {
public BadAuthException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java
index a6777c0..d7f8c1e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java
@@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Client side call cancelled.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CallCancelledException extends HBaseIOException {
private static final long serialVersionUID = 309775809470318208L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java
index db8c34a..9a67aeb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java
@@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Client-side call timeout
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CallTimeoutException extends HBaseIOException {
public CallTimeoutException(final String msg) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java
index 6fd038f..c6dec2d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Exception indicating that the remote host making this IPC lost its
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* but is only used for logging on the server side, etc.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CallerDisconnectedException extends IOException {
private static final long serialVersionUID = 1L;
public CallerDisconnectedException(String msg) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java
index ffd27b3..d93d9f6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java
@@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if a cellscanner but no codec to encode it with.
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CellScannerButNoCodecException extends HBaseIOException {
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
index 7ed1f7d..a9c10ce 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.ipc;
import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.RpcChannel;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Base interface which provides clients with an RPC connection to
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* {@link org.apache.hadoop.hbase.client.Table#coprocessorService(byte[])}.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface CoprocessorRpcChannel extends RpcChannel, BlockingRpcChannel {}
// This Interface is part of our public, client-facing API!!!
-// This belongs in client package but it is exposed in our public API so we cannot relocate.
\ No newline at end of file
+// This belongs in client package but it is exposed in our public API so we cannot relocate.
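As a hedged usage sketch (the table name, row key, and service stub are hypothetical, not defined by this patch), the public interface above is normally obtained from Table#coprocessorService(byte[]) and then wrapped by a generated protobuf service stub:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.util.Bytes;

public class CoprocessorRpcChannelSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("example_table"))) {
      // The channel is scoped to the region that hosts this row.
      CoprocessorRpcChannel channel = table.coprocessorService(Bytes.toBytes("example-row"));
      // A generated stub, e.g. SomeService.newBlockingStub(channel), would
      // issue the actual coprocessor calls; omitted here.
      System.out.println("channel: " + channel);
    }
  }
}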
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java
index 12f6451..e50a82e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Indicates that we're trying to connect to a already known as dead server. We will want to
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FailedServerException extends HBaseIOException {
public FailedServerException(String s) {
super(s);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java
index 721148b..ac9fa97 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java
@@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Indicate that the rpc server tells client to fallback to simple auth but client is disabled to do
* so.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FallbackDisallowedException extends HBaseIOException {
private static final long serialVersionUID = -6942845066279358253L;
@@ -35,4 +33,4 @@ public class FallbackDisallowedException extends HBaseIOException {
super("Server asks us to fall back to SIMPLE auth, "
+ "but this client is configured to only allow secure connections.");
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java
index 86d3b89..74bd42f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when server finds fatal issue w/ connection setup: e.g. bad rpc version
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FatalConnectionException extends DoNotRetryIOException {
public FatalConnectionException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
index a8af69c..fe039d3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java
@@ -28,7 +28,6 @@ import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Pair;
/**
@@ -39,7 +38,6 @@ import org.apache.hadoop.hbase.util.Pair;
* the whole process.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class NettyRpcClientConfigHelper {
public static final String EVENT_LOOP_CONFIG = "hbase.rpc.client.event-loop.config";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java
index 0e50943..eff5b7f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.util.DynamicClassLoader;
import org.apache.hadoop.ipc.RemoteException;
@@ -37,7 +36,6 @@ import org.apache.hadoop.ipc.RemoteException;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value = "DP_CREATE_CLASSLOADER_INSIDE_DO_PRIVILEGED", justification = "None. Address sometime.")
public class RemoteWithExtrasException extends RemoteException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java
index 6d0b9de..15db1c0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java
@@ -22,11 +22,9 @@ package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ServerNotRunningYetException extends IOException {
public ServerNotRunningYetException(String s) {
super(s);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java
index 0dd8e64..e2ed361 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java
@@ -22,16 +22,14 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Throw this in RPC call if there are too many pending requests for one region server
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ServerTooBusyException extends DoNotRetryIOException {
public ServerTooBusyException(InetSocketAddress address, long count) {
super("Busy Server! " + count + " concurrent RPCs against " + address);
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java
index a224a12..63ce25b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java
@@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.HBaseIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class StoppedRpcClientException extends HBaseIOException {
public StoppedRpcClientException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java
index 3208876..19914e1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java
@@ -18,10 +18,8 @@
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class UnsupportedCellCodecException extends FatalConnectionException {
public UnsupportedCellCodecException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java
index 7ca7dd5..271bd2b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java
@@ -18,10 +18,8 @@
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class UnsupportedCompressionCodecException extends FatalConnectionException {
public UnsupportedCompressionCodecException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java
index 12e4a7a..5f2fa07 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java
@@ -19,10 +19,8 @@
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class UnsupportedCryptoException extends FatalConnectionException {
public UnsupportedCryptoException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java
index 73bd10d..d63c867 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java
@@ -18,10 +18,8 @@
package org.apache.hadoop.hbase.ipc;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class WrongVersionException extends FatalConnectionException {
public WrongVersionException() {
super();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java
index e0386b5..0ab75da 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.quotas;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Generic quota exceeded exception
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class QuotaExceededException extends DoNotRetryIOException {
public QuotaExceededException(String msg) {
super(msg);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java
index 309dd9c..b8a99a6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java
@@ -21,14 +21,12 @@ import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Strings;
/**
* Filter to use to filter the QuotaRetriever results.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class QuotaFilter {
private Set<QuotaType> types = new HashSet<>();
private boolean hasFilters = false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
index fecd2d1..cba6a24 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
@@ -29,7 +29,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
@@ -43,7 +42,6 @@ import org.apache.hadoop.util.StringUtils;
* Scanner to iterate over the quota settings.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class QuotaRetriever implements Closeable, Iterable<QuotaSettings> {
private static final Log LOG = LogFactory.getLog(QuotaRetriever.class);
@@ -182,4 +180,4 @@ public class QuotaRetriever implements Closeable, Iterable {
scanner.init(conf, scan);
return scanner;
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java
index 2e215b6..4a7d241 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java
@@ -18,14 +18,12 @@
package org.apache.hadoop.hbase.quotas;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Describe the Scope of the quota rules.
* The quota can be enforced at the cluster level or at machine level.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum QuotaScope {
/**
* The specified throttling rules will be applied at the cluster level.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java
index ac6a396..193d165 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java
@@ -21,12 +21,10 @@ import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public abstract class QuotaSettings {
private final String userName;
private final String namespace;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java
index 1a8b934..3622a32 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java
@@ -23,14 +23,12 @@ import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class QuotaSettingsFactory {
static class QuotaGlobalsSettingsBypass extends QuotaSettings {
private final boolean bypassGlobals;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java
index 40a8b66..1ec649f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.quotas;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Describe the Quota Type.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum QuotaType {
THROTTLE,
GLOBAL_BYPASS,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java
index 9b456c2..724c880 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.quotas;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Describe the Throttle Type.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum ThrottleType {
/** Throttling based on the number of requests per time-unit */
REQUEST_NUMBER,
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java
index 293e9c6..ec665ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java
@@ -22,7 +22,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Describe the throttling result.
@@ -32,12 +31,10 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* as result of this exception.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ThrottlingException extends QuotaExceededException {
private static final long serialVersionUID = 1406576492085155743L;
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public enum Type {
NumRequestsExceeded,
RequestSizeExceeded,
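A hedged sketch of the public quota API touched above (the user name and limits are hypothetical): QuotaSettingsFactory builds a throttle, Admin applies it, and QuotaRetriever with a QuotaFilter reads it back.

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.quotas.QuotaFilter;
import org.apache.hadoop.hbase.quotas.QuotaRetriever;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
import org.apache.hadoop.hbase.quotas.QuotaSettingsFactory;
import org.apache.hadoop.hbase.quotas.ThrottleType;

public class QuotaSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      // Throttle a (hypothetical) user to 100 requests per second.
      QuotaSettings throttle = QuotaSettingsFactory.throttleUser(
          "example_user", ThrottleType.REQUEST_NUMBER, 100, TimeUnit.SECONDS);
      admin.setQuota(throttle);
    }
    // Scan the quota settings back, filtered to the same user.
    try (QuotaRetriever retriever =
        QuotaRetriever.open(conf, new QuotaFilter().setUserFilter("example_user"))) {
      for (QuotaSettings settings : retriever) {
        System.out.println(settings);
      }
    }
  }
}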
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java
index 50b8b15..073233b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java
@@ -20,10 +20,8 @@
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum BloomType {
/**
* Bloomfilters disabled
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java
index d1fdae3..e4de9c7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Reports a problem with a lease
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class LeaseException extends DoNotRetryIOException {
private static final long serialVersionUID = 8179703995292418650L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
index d3b1ec1..e2b7bba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if request for nonexistent column family.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class NoSuchColumnFamilyException extends DoNotRetryIOException {
private static final long serialVersionUID = -6569952730832331274L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java
index ddc2270..54d973b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown by the region server when it is aborting.
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionServerAbortedException extends RegionServerStoppedException {
public RegionServerAbortedException(String s) {
super(s);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
index a4a9720..e8651db 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
@@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown if the region server log directory exists (which indicates another
* region server is running at the same address)
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionServerRunningException extends IOException {
private static final long serialVersionUID = 1L << 31 - 1L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java
index 95f697e..99af432 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown by the region server when it is in shutting down state.
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RegionServerStoppedException extends IOException {
public RegionServerStoppedException(String s) {
super(s);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java
index c2460d4..eb69e33 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java
@@ -21,13 +21,11 @@ package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a request contains a key which is not part of this region
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class WrongRegionException extends IOException {
private static final long serialVersionUID = 993179627856392526L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java
index cc42819..c614a57 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java
@@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.regionserver.wal;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when we fail close of the write-ahead-log file.
* Package private. Only used inside this package.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FailedLogCloseException extends IOException {
private static final long serialVersionUID = 1759152841462990925L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java
index 4c6ef45..ff79716 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java
@@ -19,14 +19,12 @@
package org.apache.hadoop.hbase.regionserver.wal;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when we fail close of the write-ahead-log file.
* Package private. Only used inside this package.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FailedSyncBeforeLogCloseException extends FailedLogCloseException {
private static final long serialVersionUID = 1759152841462990925L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
index 937e943..66781f1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.replication;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.HBaseException;
/**
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.exceptions.HBaseException;
* store, loss of connection to a peer cluster or errors during deserialization of replication data.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ReplicationException extends HBaseException {
private static final long serialVersionUID = -8885598603988198062L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
index f7cc2dd..badec0e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java
@@ -27,14 +27,12 @@ import java.util.TreeMap;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
* A configuration for the replication peer cluster.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ReplicationPeerConfig {
private String clusterKey;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
index 577d13a..95c84c2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.replication;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* The POJO equivalent of ReplicationProtos.ReplicationPeerDescription
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ReplicationPeerDescription {
private final String id;
@@ -56,4 +54,4 @@ public class ReplicationPeerDescription {
builder.append(", config : " + config);
return builder.toString();
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
index 07b871d..cf57517 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
@@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.security;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Exception thrown by access-related methods.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class AccessDeniedException extends DoNotRetryIOException {
private static final long serialVersionUID = 1913879564363001780L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index 1c4a868..5c89c3f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
@@ -43,7 +42,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* Utility client for doing access control admin operations.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class AccessControlClient {
public static final TableName ACL_TABLE_NAME =
TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl");
@@ -295,4 +293,4 @@ public class AccessControlClient {
}
return permList;
}
-}
\ No newline at end of file
+}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java
index f5d16d4..52d10c4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java
@@ -19,10 +19,8 @@
package org.apache.hadoop.hbase.security.access;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface AccessControlConstants {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index 7bf5304..b25783d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -27,7 +27,6 @@ import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.VersionedWritable;
@@ -40,12 +39,10 @@ import com.google.common.collect.Maps;
* @see TablePermission
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class Permission extends VersionedWritable {
protected static final byte VERSION = 0;
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public enum Action {
READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A');
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
index 5fdeee9..63a4d09 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java
@@ -22,14 +22,12 @@ import java.util.Collections;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* This class contains visibility labels associated with a Scan/Get deciding which all labeled data
* current scan/get can access.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class Authorizations {
private List<String> labels;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java
index 765559f..8cd1ae7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.security.visibility;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* operators AND(&), OR(|) and NOT(!)
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CellVisibility {
private String expression;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java
index d11c167..8d20de8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java
@@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.security.visibility;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class InvalidLabelException extends DoNotRetryIOException {
private static final long serialVersionUID = 1L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java
index 3fbf937..3fb039a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java
@@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.security.visibility;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class LabelAlreadyExistsException extends DoNotRetryIOException {
private static final long serialVersionUID = 1L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index d87bf14..d99f454 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -26,7 +26,6 @@ import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
@@ -53,7 +52,6 @@ import com.google.protobuf.ServiceException;
* Utility client for doing visibility labels admin operations.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class VisibilityClient {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
index 90dd0a7..4d87bdf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.security.visibility;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/*
* This exception indicates that VisibilityController hasn't finished initialization.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class VisibilityControllerNotReadyException extends IOException {
private static final long serialVersionUID = 1725986525207989173L;
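A hedged sketch of how the visibility classes above fit together (table, labels, and values are hypothetical; it assumes the labels were already registered, e.g. via VisibilityClient#addLabels): CellVisibility tags a cell on write, while Authorizations declares what a read may see.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilityLabelSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("example_table"))) {
      // Write a cell visible only to holders of SECRET who are not PUBLIC.
      Put put = new Put(Bytes.toBytes("example-row"));
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
      put.setCellVisibility(new CellVisibility("SECRET&!PUBLIC"));
      table.put(put);

      // Read it back, declaring which labels this scan is authorized for.
      Scan scan = new Scan();
      scan.setAuthorizations(new Authorizations("SECRET"));
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(result);
        }
      }
    }
  }
}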
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
index d0ef28d..7a1761c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.SnapshotDescription;
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class CorruptedSnapshotException extends HBaseSnapshotException {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
index 05f3556..f6817e7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Thrown when a snapshot could not be exported due to an error during the operation.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
@SuppressWarnings("serial")
public class ExportSnapshotException extends HBaseSnapshotException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
index 2fe58ed..bd185a1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.SnapshotDescription;
/**
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HBaseSnapshotException extends DoNotRetryIOException {
private SnapshotDescription description;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
index 70e8d3b..de58077 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.SnapshotDescription;
/**
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class RestoreSnapshotException extends HBaseSnapshotException {
public RestoreSnapshotException(String msg, SnapshotDescription desc) {
super(msg, desc);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
index 2738b3d..9cfe83a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.SnapshotDescription;
/**
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class SnapshotCreationException extends HBaseSnapshotException {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
index e088408..ae574b4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.SnapshotDescription;
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class SnapshotDoesNotExistException extends HBaseSnapshotException {
/**
* @param msg full description of the failure
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
index 425f93a..9b31625 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.SnapshotDescription;
/**
@@ -26,7 +25,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription;
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class SnapshotExistsException extends HBaseSnapshotException {
public SnapshotExistsException(String msg) {
super(msg);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java
index b27ff65..343d702 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java
@@ -21,14 +21,12 @@ import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
* Thrown if a table should be online/offline but is partially open
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class TablePartiallyOpenException extends IOException {
private static final long serialVersionUID = 3571982660065058361L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java
index e4242f5..dc6dd56 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java
@@ -18,14 +18,12 @@
package org.apache.hadoop.hbase.snapshot;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Exception thrown when we get a request for a snapshot we don't recognize.
*/
@SuppressWarnings("serial")
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class UnknownSnapshotException extends HBaseSnapshotException {
/**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java
index 2cd1d00..1613582 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java
@@ -22,11 +22,9 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/** Thrown when the file system needs to be upgraded */
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class FileSystemVersionException extends IOException {
private static final long serialVersionUID = 1004053363L;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
index 2a9987c..dddd052 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java
@@ -22,14 +22,12 @@ import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.codehaus.jackson.map.ObjectMapper;
/**
* Utility class for converting objects to JSON
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class JsonMapper {
private JsonMapper() {
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
index 9acbb43..c5af6ff 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -40,26 +40,32 @@ import org.apache.hadoop.hbase.ClassFinder.Not;
import org.apache.hadoop.hbase.ClassTestFinder.TestClassFilter;
import org.apache.hadoop.hbase.ClassTestFinder.TestFileNameFilter;
import org.junit.Assert;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
- * Test cases for ensuring our client visible classes have annotations
- * for {@link InterfaceAudience}.
- *
- * All classes in hbase-client and hbase-common module MUST have InterfaceAudience
- * annotations. All InterfaceAudience.Public annotated classes MUST also have InterfaceStability
- * annotations. Think twice about marking an interface InterfaceAudience.Public. Make sure that
- * it is an interface, not a class (for most cases), and clients will actually depend on it. Once
- * something is marked with Public, we cannot change the signatures within the major release. NOT
- * everything in the hbase-client module or every java public class has to be marked with
+ * Test cases for ensuring our client visible classes have annotations for
+ * {@link InterfaceAudience}.
+ *
+ * All classes in hbase-client and hbase-common module MUST have InterfaceAudience annotations.
+ * Think twice about marking an interface InterfaceAudience.Public. Make sure that it is an
+ * interface, not a class (for most cases), and clients will actually depend on it. Once something
+ * is marked with Public, we cannot change the signatures within the major release. NOT everything
+ * in the hbase-client module or every java public class has to be marked with
* InterfaceAudience.Public. ONLY the ones that an hbase application will directly use (Table, Get,
- * etc, versus ProtobufUtil).
- *
- * Also note that HBase has it's own annotations in hbase-annotations module with the same names
- * as in Hadoop. You should use the HBase's classes.
- *
- * See https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html
+ * etc, versus ProtobufUtil). And also, InterfaceAudience.Public annotated classes MUST NOT have
+ * InterfaceStability annotations. The stability of these classes only depends on versioning.
+ *
+ * All classes which are marked as InterfaceAudience.LimitedPrivate MUST also have
+ * InterfaceStability annotations. The only exception is HBaseInterfaceAudience.CONFIG. It is used
+ * to indicate that the class name will be exposed in user facing configuration files.
+ *
+ * Also note that HBase has its own annotations in hbase-annotations module with the same names as
+ * in Hadoop. You should use HBase's classes.
+ *
+ * See
+ * https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html
* and https://issues.apache.org/jira/browse/HBASE-10462.
*/
@Category(SmallTests.class)
@@ -105,16 +111,15 @@ public class TestInterfaceAudienceAnnotations {
return false;
}
- Class<?> ann = getAnnotation(c);
- if (ann != null &&
- !InterfaceAudience.Public.class.equals(ann)) {
+ Annotation ann = getAnnotation(c);
+ if (ann != null && !InterfaceAudience.Public.class.equals(ann.annotationType())) {
return true;
}
return isAnnotatedPrivate(c.getEnclosingClass());
}
- protected Class<?> getAnnotation(Class<?> c) {
+ protected Annotation getAnnotation(Class<?> c) {
// we should get only declared annotations, not inherited ones
Annotation[] anns = c.getDeclaredAnnotations();
@@ -123,7 +128,7 @@ public class TestInterfaceAudienceAnnotations {
// an enum instead we have three independent annotations!
Class<?> type = ann.annotationType();
if (isInterfaceAudienceClass(type)) {
- return type;
+ return ann;
}
}
return null;
@@ -159,13 +164,32 @@ public class TestInterfaceAudienceAnnotations {
}
}
- /** Selects classes with one of the {@link InterfaceAudience.Public} annotation in their
- * class declaration.
+ /**
+ * Selects classes with one of the {@link InterfaceAudience.Public} annotation in their class
+ * declaration.
*/
class InterfaceAudiencePublicAnnotatedClassFilter extends InterfaceAudienceAnnotatedClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
- return (InterfaceAudience.Public.class.equals(getAnnotation(c)));
+ Annotation ann = getAnnotation(c);
+ return ann != null && InterfaceAudience.Public.class.equals(ann.annotationType());
+ }
+ }
+
+ /**
+ * Selects classes with one of the {@link InterfaceAudience.LimitedPrivate} annotation in their
+ * class declaration.
+ */
+ class InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter
+ extends InterfaceAudienceAnnotatedClassFilter {
+ @Override
+ public boolean isCandidateClass(Class<?> c) {
+ Annotation ann = getAnnotation(c);
+ if (ann == null || !InterfaceAudience.LimitedPrivate.class.equals(ann.annotationType())) {
+ return false;
+ }
+ InterfaceAudience.LimitedPrivate iaAnn = (InterfaceAudience.LimitedPrivate) ann;
+ return iaAnn.value().length == 0 || !HBaseInterfaceAudience.CONFIG.equals(iaAnn.value()[0]);
}
}
@@ -288,10 +312,11 @@ public class TestInterfaceAudienceAnnotations {
);
Set<Class<?>> classes = classFinder.findClasses(false);
-
- LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
- for (Class<?> clazz : classes) {
- LOG.info(clazz);
+ if (!classes.isEmpty()) {
+ LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
+ for (Class<?> clazz : classes) {
+ LOG.info(clazz);
+ }
}
Assert.assertEquals("All classes should have @InterfaceAudience annotation",
@@ -300,10 +325,10 @@ public class TestInterfaceAudienceAnnotations {
/**
* Checks whether all the classes in client and common modules that are marked
- * InterfaceAudience.Public also have {@link InterfaceStability} annotations.
+ * InterfaceAudience.Public do not have {@link InterfaceStability} annotations.
*/
@Test
- public void testInterfaceStabilityAnnotation()
+ public void testNoInterfaceStabilityAnnotationForPublicAPI()
throws ClassNotFoundException, IOException, LinkageError {
// find classes that are:
@@ -313,7 +338,7 @@ public class TestInterfaceAudienceAnnotations {
// NOT test classes
// AND NOT generated classes
// AND are annotated with InterfaceAudience.Public
- // AND NOT annotated with InterfaceStability
+ // AND annotated with InterfaceStability
ClassFinder classFinder = new ClassFinder(
new And(new MainCodeResourcePathFilter(),
new TestFileNameFilter()),
@@ -324,18 +349,65 @@ public class TestInterfaceAudienceAnnotations {
new Not(new ShadedProtobufClassFilter()),
new InterfaceAudiencePublicAnnotatedClassFilter(),
new Not(new IsInterfaceStabilityClassFilter()),
- new Not(new InterfaceStabilityAnnotatedClassFilter()))
+ new InterfaceStabilityAnnotatedClassFilter())
);
Set<Class<?>> classes = classFinder.findClasses(false);
- LOG.info("These are the classes that DO NOT have @InterfaceStability annotation:");
- for (Class<?> clazz : classes) {
- LOG.info(clazz);
+ if (!classes.isEmpty()) {
+ LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " +
+ "annotation:");
+ for (Class<?> clazz : classes) {
+ LOG.info(clazz);
+ }
}
- Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should "
- + "have @InterfaceStability annotation as well",
+ Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should not "
+ + "have @InterfaceStability annotation",
+ 0, classes.size());
+ }
+
+ /**
+ * Checks whether all the classes in client and common modules that are marked
+ * InterfaceAudience.LimitedPrivate also have {@link InterfaceStability} annotations.
+ */
+ @Ignore
+ @Test
+ public void testInterfaceStabilityAnnotationForLimitedAPI()
+ throws ClassNotFoundException, IOException, LinkageError {
+
+ // find classes that are:
+ // In the main jar
+ // AND are not in a hadoop-compat module
+ // AND are public
+ // NOT test classes
+ // AND NOT generated classes
+ // AND are annotated with InterfaceAudience.LimitedPrivate
+ // AND NOT annotated with InterfaceStability
+ ClassFinder classFinder = new ClassFinder(
+ new And(new MainCodeResourcePathFilter(),
+ new TestFileNameFilter()),
+ new Not((FileNameFilter)new TestFileNameFilter()),
+ new And(new PublicClassFilter(),
+ new Not(new TestClassFilter()),
+ new Not(new GeneratedClassFilter()),
+ new Not(new ShadedProtobufClassFilter()),
+ new InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter(),
+ new Not(new IsInterfaceStabilityClassFilter()),
+ new Not(new InterfaceStabilityAnnotatedClassFilter()))
+ );
+
+ Set<Class<?>> classes = classFinder.findClasses(false);
+
+ if (!classes.isEmpty()) {
+ LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " +
+ "have @InterfaceStability annotation:");
+ for (Class<?> clazz : classes) {
+ LOG.info(clazz);
+ }
+ }
+ Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " +
+ "should have @InterfaceStability annotation",
0, classes.size());
}
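As a point of reference for the convention the rewritten test enforces, here is a minimal sketch; the class names (FooService, BarInternal, BazConfigHook) are hypothetical and not part of this patch. An InterfaceAudience.Public class now carries only the audience annotation (its stability is governed by versioning), a LimitedPrivate class still pairs the audience annotation with an InterfaceStability annotation, and LimitedPrivate(CONFIG) classes are exempted by the new InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter.

import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;

// Public API: audience annotation only; a stability annotation would now fail the test.
@InterfaceAudience.Public
public class FooService {
}

// Limited-private API: a stability annotation is still required.
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
class BarInternal {
}

// Exempt case: LimitedPrivate(CONFIG) only marks a class name exposed in
// user-facing configuration files, so no stability annotation is expected.
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
class BazConfigHook {
}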
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
index bbed218..d715d01 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java
@@ -25,7 +25,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.Strings;
@@ -68,7 +67,6 @@ import org.apache.hadoop.security.UserGroupInformation;
* an example of configuring a user of this Auth Chore to run on a secure cluster.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class AuthUtil {
private static final Log LOG = LogFactory.getLog(AuthUtil.class);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
index 53b319b..8a701f2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
@@ -60,7 +59,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface Cell {
//1) Row
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index bb5197f..6585173 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -36,7 +36,6 @@ import java.util.NavigableMap;
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceAudience.Private;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TagCompressionContext;
import org.apache.hadoop.hbase.io.util.Dictionary;
@@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.util.ClassSize;
* method level.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class CellUtil {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
index 19363d0..70858f4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java
@@ -32,7 +32,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* ChoreService is a service that can be used to schedule instances of {@link ScheduledChore} to run
@@ -54,7 +53,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* Calling this method ensures that all scheduled chores are cancelled and cleaned up properly.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class ChoreService implements ChoreServicer {
private static final Log LOG = LogFactory.getLog(ChoreService.class);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
index 4baaabe..885219a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
@@ -26,7 +26,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
@@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.zookeeper.ZKConfig;
* Adds HBase configuration files to a Configuration
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class HBaseConfiguration extends Configuration {
private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
index edcbdc5..85e8725 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* All hbase specific IOExceptions should be subclasses of HBaseIOException
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class HBaseIOException extends IOException {
private static final long serialVersionUID = 1L;
@@ -46,4 +44,4 @@ public class HBaseIOException extends IOException {
public HBaseIOException(Throwable cause) {
super(cause);
}
-}
\ No newline at end of file
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java
index cb42e48..ae1db7c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* This class defines constants for different classes of hbase limited private apis
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class HBaseInterfaceAudience {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index 3789f71..eff5690 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -29,14 +29,12 @@ import java.util.regex.Pattern;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
* HConstants holds a bunch of HBase-related constants
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public final class HConstants {
// NOTICE!!!! Please do not add a constants here, unless they are referenced by a lot of classes.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
index 23876ab..15f71a9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java
@@ -27,7 +27,6 @@ import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* as opposed to a more tangible container.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class NamespaceDescriptor {
/** System namespace name. */
@@ -162,7 +160,6 @@ public class NamespaceDescriptor {
}
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public static class Builder {
private String bName;
private Map<String, String> bConfiguration = new TreeMap<>();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
index f35f27b..bb8bb08 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
import java.io.IOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.NonceKey;
@@ -31,7 +30,6 @@ import org.apache.hadoop.util.StringUtils;
* Procedure information
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ProcedureInfo implements Cloneable {
private final long procId;
private final String procName;
@@ -216,4 +214,4 @@ public class ProcedureInfo implements Cloneable {
return procOwner.equals(user.getShortName());
}
-}
\ No newline at end of file
+}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java
index 306d285..5d95add 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* POJO representing Procedure State
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum ProcedureState {
INITIALIZING, RUNNABLE, WAITING, WAITING_TIMEOUT, ROLLEDBACK, FINISHED;
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 422ca1a..2d1eec5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
@@ -24,7 +24,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import com.google.common.annotations.VisibleForTesting;
@@ -42,7 +41,6 @@ import com.google.common.annotations.VisibleForTesting;
* an entry being added to a queue, etc.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public abstract class ScheduledChore implements Runnable {
private static final Log LOG = LogFactory.getLog(ScheduledChore.class);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
index 0c0a7ff..fabf0c0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -25,7 +25,6 @@ import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.net.Address;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
@@ -56,7 +55,6 @@ import com.google.common.net.InetAddresses;
* <p>Immutable.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ServerName implements Comparable<ServerName>, Serializable {
private static final long serialVersionUID = 1367463982557264981L;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java
index 9adaa1a..cdb802c 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java
@@ -19,13 +19,11 @@
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Implementers are Stoppable.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public interface Stoppable {
/**
* Stop this service.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
index cba03c0..c4c15d0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -24,7 +24,6 @@ import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
@@ -53,7 +52,6 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class TableName implements Comparable<TableName> {
/** See {@link #createTableNameIfNecessary(ByteBuffer, ByteBuffer)} */
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
index 2133750..8af562e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.filter;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.Bytes;
/** Base class for byte array comparators */
@InterfaceAudience.Public
-@InterfaceStability.Stable
// TODO Now we are deviating a lot from the actual Comparable what this implements, by
// adding special compareTo methods. We have to clean it. Deprecate this class and replace it
// with a more generic one which says it compares bytes (not necessary a byte array only)
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
index f6f7def..9aaa431 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java
@@ -28,7 +28,6 @@ import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
@@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* Not thread safe!
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class ByteBufferOutputStream extends OutputStream
implements ByteBufferWriter {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
index f658210..5c7c292 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
@@ -25,7 +25,6 @@ import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
@@ -40,7 +39,6 @@ import org.apache.hadoop.io.WritableComparator;
* buffer is accessed when we go to serialize.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS",
justification="It has been like this forever")
@@ -229,7 +227,6 @@ implements WritableComparable<ImmutableBytesWritable> {
/** A Comparator optimized for ImmutableBytesWritable.
*/
@InterfaceAudience.Public
- @InterfaceStability.Stable
public static class Comparator extends WritableComparator {
private BytesWritable.Comparator comparator =
new BytesWritable.Comparator();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
index 77b9495..764b2a0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
@@ -19,7 +19,6 @@
package org.apache.hadoop.hbase.io;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* <p>Immutable. Thread-safe.
*/
@InterfaceAudience.Public
-@InterfaceStability.Stable
public class TimeRange {
public static final long INITIAL_MIN_TIMESTAMP = 0L;
public static final long INITIAL_MAX_TIMESTAMP = Long.MAX_VALUE;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index 8dfab44..6f63f80 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -28,7 +28,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
@@ -101,7 +100,6 @@ public final class Compression {
value="SE_TRANSIENT_FIELD_NOT_RESTORED",
justification="We are not serializing so doesn't apply (not sure why transient though)")
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public static enum Algorithm {
LZO("lzo") {
// Use base type to avoid compile-time dependencies.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java
index e19a13d..3f4bf2b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java
@@ -22,13 +22,11 @@ import java.io.OutputStream;
import java.security.Key;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* A common interface for a cryptographic algorithm.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public abstract class Cipher {
public static final int KEY_LENGTH = 16;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java
index 5a475cc..e457c13 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java
@@ -18,14 +18,12 @@ package org.apache.hadoop.hbase.io.crypto;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* An CipherProvider contributes support for various cryptographic
* Ciphers.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface CipherProvider extends Configurable {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java
index 1e2881e..a8dc396 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java
@@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.MD5Hash;
import com.google.common.base.Preconditions;
@@ -31,7 +30,6 @@ import com.google.common.base.Preconditions;
* Crypto context. Encapsulates an encryption algorithm and its key material.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class Context implements Configurable {
private Configuration conf;
private Cipher cipher;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
index 3f5cd2d..6f78e83 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java
@@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.io.crypto;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.crypto.aes.CommonsCryptoAES;
/**
* The default cipher provider. Supports AES via the Commons Crypto.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class CryptoCipherProvider implements CipherProvider {
private static CryptoCipherProvider instance;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java
index d3029db..947e11a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java
@@ -21,13 +21,11 @@ import java.io.InputStream;
import java.security.Key;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Decryptors apply a cipher to an InputStream to recover plaintext.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface Decryptor {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
index 4f2aebe..9c82b2a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java
@@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.io.crypto;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.crypto.aes.AES;
/**
* The default cipher provider. Supports AES via the JCE.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class DefaultCipherProvider implements CipherProvider {
private static DefaultCipherProvider instance;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
index b6c2e97..e8727a7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.util.ReflectionUtils;
@@ -48,7 +47,6 @@ import org.apache.hadoop.util.ReflectionUtils;
* A facade for encryption algorithms and related support.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public final class Encryption {
private static final Log LOG = LogFactory.getLog(Encryption.class);
@@ -57,7 +55,6 @@ public final class Encryption {
* Crypto context
*/
@InterfaceAudience.Public
- @InterfaceStability.Evolving
public static class Context extends org.apache.hadoop.hbase.io.crypto.Context {
/** The null crypto context */
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java
index cda703d..4e84a68 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java
@@ -21,13 +21,11 @@ import java.io.OutputStream;
import java.security.Key;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Encryptors apply a cipher to an OutputStream to produce ciphertext.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface Encryptor {
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java
index 515a664..e0542e1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.io.crypto;
import java.security.Key;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* KeyProvider is a interface to abstract the different methods of retrieving
@@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
*
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public interface KeyProvider {
public static final String PASSWORD = "password";
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
index 7e3c013..1f6c83a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
@@ -34,7 +34,6 @@ import java.util.Locale;
import java.util.Properties;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* A basic KeyProvider that can resolve keys from a protected KeyStore file
@@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
* LoadStoreParameters.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public class KeyStoreKeyProvider implements KeyProvider {
protected KeyStore store;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
index d7535e5..1133b91 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
@@ -20,7 +20,6 @@ import java.io.IOException;
import java.io.OutputStream;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.util.Bytes;
* in the HBase mailing list to prevent collisions.
*/
@InterfaceAudience.Public
-@InterfaceStability.Evolving
public enum DataBlockEncoding {
/** Disable data block encoding. */
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
index 4cc636e..3ee8cfc 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hbase.net;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
import com.google.common.net.HostAndPort;
@@ -30,7 +29,6 @@ import com.google.common.net.HostAndPort;
* <p>In implementation this class is a facade over Guava's {@link HostAndPort}.
* We cannot have Guava classes in our API hence this Type.
*/
-@InterfaceStability.Evolving
@InterfaceAudience.Public
public class Address implements Comparable<Address>