From 0bd21d6a7236c010642c7536a70fa1c001f93e10 Mon Sep 17 00:00:00 2001
From: zhangduo
Date: Fri, 31 Mar 2017 20:59:04 +0800
Subject: [PATCH] HBASE-17857 Remove IS annotations from IA.Public classes

---
 .../hbase/classification/InterfaceAudience.java | 1 -
 .../hbase/classification/InterfaceStability.java | 1 -
 .../apache/hadoop/hbase/CallDroppedException.java | 4 +-
 .../hadoop/hbase/CallQueueTooBigException.java | 4 +-
 .../hadoop/hbase/ClockOutOfSyncException.java | 2 -
 .../org/apache/hadoop/hbase/ClusterStatus.java | 2 -
 .../apache/hadoop/hbase/DoNotRetryIOException.java | 2 -
 .../hadoop/hbase/DroppedSnapshotException.java | 2 -
 .../org/apache/hadoop/hbase/HColumnDescriptor.java | 2 -
 .../java/org/apache/hadoop/hbase/HRegionInfo.java | 2 -
 .../org/apache/hadoop/hbase/HRegionLocation.java | 2 -
 .../org/apache/hadoop/hbase/HTableDescriptor.java | 2 -
 .../hbase/InvalidFamilyOperationException.java | 2 -
 .../org/apache/hadoop/hbase/KeepDeletedCells.java | 2 -
 .../hadoop/hbase/MasterNotRunningException.java | 2 -
 .../hadoop/hbase/MemoryCompactionPolicy.java | 2 -
 .../hadoop/hbase/MultiActionResultTooLarge.java | 2 -
 .../hadoop/hbase/NamespaceExistException.java | 2 -
 .../hadoop/hbase/NamespaceNotFoundException.java | 2 -
 .../hbase/NotAllMetaRegionsOnlineException.java | 2 -
 .../hadoop/hbase/NotServingRegionException.java | 2 -
 .../apache/hadoop/hbase/PleaseHoldException.java | 2 -
 .../org/apache/hadoop/hbase/RegionException.java | 2 -
 .../java/org/apache/hadoop/hbase/RegionLoad.java | 2 -
 .../hadoop/hbase/RegionTooBusyException.java | 2 -
 .../hbase/ReplicationPeerNotFoundException.java | 4 +-
 .../hadoop/hbase/RetryImmediatelyException.java | 2 -
 .../java/org/apache/hadoop/hbase/ServerLoad.java | 2 -
 .../apache/hadoop/hbase/TableExistsException.java | 2 -
 .../hadoop/hbase/TableInfoMissingException.java | 2 -
 .../hadoop/hbase/TableNotDisabledException.java | 2 -
 .../hadoop/hbase/TableNotEnabledException.java | 2 -
 .../hadoop/hbase/TableNotFoundException.java | 2 -
 .../hadoop/hbase/UnknownRegionException.java | 2 -
 .../hadoop/hbase/UnknownScannerException.java | 2 -
 .../hadoop/hbase/ZooKeeperConnectionException.java | 2 -
 .../java/org/apache/hadoop/hbase/client/Admin.java | 2 -
 .../org/apache/hadoop/hbase/client/Append.java | 2 -
 .../org/apache/hadoop/hbase/client/AsyncAdmin.java | 4 +-
 .../hadoop/hbase/client/AsyncConnection.java | 4 +-
 .../org/apache/hadoop/hbase/client/AsyncTable.java | 2 -
 .../apache/hadoop/hbase/client/AsyncTableBase.java | 2 -
 .../hadoop/hbase/client/AsyncTableBuilder.java | 2 -
 .../hbase/client/AsyncTableRegionLocator.java | 2 -
 .../org/apache/hadoop/hbase/client/Attributes.java | 2 -
 .../hadoop/hbase/client/BufferedMutator.java | 3 -
 .../hadoop/hbase/client/BufferedMutatorParams.java | 2 -
 .../apache/hadoop/hbase/client/CompactType.java | 4 +-
 .../hadoop/hbase/client/CompactionState.java | 2 -
 .../org/apache/hadoop/hbase/client/Connection.java | 2 -
 .../hadoop/hbase/client/ConnectionFactory.java | 2 -
 .../apache/hadoop/hbase/client/Consistency.java | 2 -
 .../org/apache/hadoop/hbase/client/Delete.java | 2 -
 .../hbase/client/DoNotRetryRegionException.java | 2 -
 .../org/apache/hadoop/hbase/client/Durability.java | 2 -
 .../java/org/apache/hadoop/hbase/client/Get.java | 2 -
 .../hadoop/hbase/client/HTableMultiplexer.java | 3 -
 .../org/apache/hadoop/hbase/client/Increment.java | 2 -
 .../apache/hadoop/hbase/client/IsolationLevel.java | 2 -
 .../hadoop/hbase/client/MasterSwitchType.java | 4 +-
 .../hbase/client/MobCompactPartitionPolicy.java | 2 -
 .../org/apache/hadoop/hbase/client/Mutation.java | 2 -
 .../hbase/client/NoServerForRegionException.java | 2 -
 .../org/apache/hadoop/hbase/client/Operation.java | 2 -
 .../hbase/client/OperationWithAttributes.java | 2 -
 .../java/org/apache/hadoop/hbase/client/Put.java | 2 -
 .../java/org/apache/hadoop/hbase/client/Query.java | 4 +-
 .../apache/hadoop/hbase/client/RawAsyncTable.java | 4 -
 .../hadoop/hbase/client/RawScanResultConsumer.java | 6 +-
 .../hadoop/hbase/client/RegionLoadStats.java | 2 -
 .../apache/hadoop/hbase/client/RegionLocator.java | 2 -
 .../hbase/client/RegionOfflineException.java | 2 -
 .../hadoop/hbase/client/RequestController.java | 4 -
 .../hbase/client/RequestControllerFactory.java | 2 -
 .../org/apache/hadoop/hbase/client/Result.java | 2 -
 .../apache/hadoop/hbase/client/ResultScanner.java | 2 -
 .../hbase/client/RetriesExhaustedException.java | 2 -
 .../RetriesExhaustedWithDetailsException.java | 2 -
 .../java/org/apache/hadoop/hbase/client/Row.java | 2 -
 .../org/apache/hadoop/hbase/client/RowAccess.java | 2 -
 .../apache/hadoop/hbase/client/RowMutations.java | 2 -
 .../hadoop/hbase/client/RowTooBigException.java | 2 -
 .../hadoop/hbase/client/RpcRetryingCaller.java | 4 +-
 .../java/org/apache/hadoop/hbase/client/Scan.java | 3 -
 .../hadoop/hbase/client/ScanResultConsumer.java | 2 -
 .../hbase/client/ShortCircuitMasterConnection.java | 2 -
 .../hadoop/hbase/client/SnapshotDescription.java | 2 -
 .../apache/hadoop/hbase/client/SnapshotType.java | 4 +-
 .../hbase/client/SyncCoprocessorRpcChannel.java | 4 +-
 .../java/org/apache/hadoop/hbase/client/Table.java | 2 -
 .../apache/hadoop/hbase/client/TableBuilder.java | 2 -
 .../hbase/client/UnmodifyableHRegionInfo.java | 2 -
 .../hbase/client/UnmodifyableHTableDescriptor.java | 2 -
 .../hadoop/hbase/client/WrongRowIOException.java | 2 -
 .../hbase/client/backoff/ClientBackoffPolicy.java | 6 +-
 .../backoff/ExponentialClientBackoffPolicy.java | 6 +-
 .../hadoop/hbase/client/coprocessor/Batch.java | 4 -
 .../hadoop/hbase/client/metrics/ScanMetrics.java | 2 -
 .../client/metrics/ServerSideScanMetrics.java | 2 -
 .../hbase/client/replication/ReplicationAdmin.java | 2 -
 .../hadoop/hbase/client/replication/TableCFs.java | 4 +-
 .../hbase/client/security/SecurityCapability.java | 2 -
 .../coprocessor/BypassCoprocessorException.java | 4 +-
 .../hbase/coprocessor/CoprocessorException.java | 2 -
 .../exceptions/ConnectionClosingException.java | 2 -
 .../exceptions/FailedSanityCheckException.java | 2 -
 .../hbase/exceptions/MergeRegionException.java | 2 -
 .../exceptions/PreemptiveFastFailException.java | 4 +-
 .../exceptions/RegionInRecoveryException.java | 2 -
 .../hbase/exceptions/RequestTooBigException.java | 2 -
 .../hbase/exceptions/ScannerResetException.java | 2 -
 .../hbase/exceptions/UnknownProtocolException.java | 2 -
 .../hadoop/hbase/filter/BinaryComparator.java | 2 -
 .../hbase/filter/BinaryPrefixComparator.java | 2 -
 .../apache/hadoop/hbase/filter/BitComparator.java | 3 -
 .../hadoop/hbase/filter/ColumnCountGetFilter.java | 2 -
 .../hbase/filter/ColumnPaginationFilter.java | 2 -
 .../hadoop/hbase/filter/ColumnPrefixFilter.java | 2 -
 .../hadoop/hbase/filter/ColumnRangeFilter.java | 2 -
 .../apache/hadoop/hbase/filter/CompareFilter.java | 3 -
 .../hadoop/hbase/filter/DependentColumnFilter.java | 2 -
 .../apache/hadoop/hbase/filter/FamilyFilter.java | 2 -
 .../org/apache/hadoop/hbase/filter/Filter.java | 3 -
 .../org/apache/hadoop/hbase/filter/FilterList.java | 3 -
 .../hadoop/hbase/filter/FirstKeyOnlyFilter.java | 2 -
 .../FirstKeyValueMatchingQualifiersFilter.java | 2 -
 .../apache/hadoop/hbase/filter/FuzzyRowFilter.java | 2 -
 .../hadoop/hbase/filter/InclusiveStopFilter.java | 2 -
 .../hbase/filter/IncompatibleFilterException.java | 2 -
 .../hbase/filter/InvalidRowFilterException.java | 2 -
 .../apache/hadoop/hbase/filter/KeyOnlyFilter.java | 2 -
 .../apache/hadoop/hbase/filter/LongComparator.java | 2 -
 .../hadoop/hbase/filter/MultiRowRangeFilter.java | 3 -
 .../hbase/filter/MultipleColumnPrefixFilter.java | 2 -
 .../apache/hadoop/hbase/filter/NullComparator.java | 2 -
 .../org/apache/hadoop/hbase/filter/PageFilter.java | 2 -
 .../apache/hadoop/hbase/filter/ParseConstants.java | 2 -
 .../apache/hadoop/hbase/filter/ParseFilter.java | 2 -
 .../apache/hadoop/hbase/filter/PrefixFilter.java | 2 -
 .../hadoop/hbase/filter/QualifierFilter.java | 2 -
 .../hadoop/hbase/filter/RandomRowFilter.java | 2 -
 .../hadoop/hbase/filter/RegexStringComparator.java | 3 -
 .../org/apache/hadoop/hbase/filter/RowFilter.java | 2 -
 .../filter/SingleColumnValueExcludeFilter.java | 2 -
 .../hbase/filter/SingleColumnValueFilter.java | 2 -
 .../org/apache/hadoop/hbase/filter/SkipFilter.java | 2 -
 .../hadoop/hbase/filter/SubstringComparator.java | 2 -
 .../hadoop/hbase/filter/TimestampsFilter.java | 2 -
 .../apache/hadoop/hbase/filter/ValueFilter.java | 2 -
 .../hadoop/hbase/filter/WhileMatchFilter.java | 2 -
 .../apache/hadoop/hbase/ipc/BadAuthException.java | 2 -
 .../hadoop/hbase/ipc/CallCancelledException.java | 2 -
 .../hadoop/hbase/ipc/CallTimeoutException.java | 2 -
 .../hbase/ipc/CallerDisconnectedException.java | 2 -
 .../hbase/ipc/CellScannerButNoCodecException.java | 4 +-
 .../hadoop/hbase/ipc/CoprocessorRpcChannel.java | 4 +-
 .../hadoop/hbase/ipc/FailedServerException.java | 2 -
 .../hbase/ipc/FallbackDisallowedException.java | 4 +-
 .../hadoop/hbase/ipc/FatalConnectionException.java | 2 -
 .../hbase/ipc/NettyRpcClientConfigHelper.java | 2 -
 .../hbase/ipc/RemoteWithExtrasException.java | 2 -
 .../hbase/ipc/ServerNotRunningYetException.java | 2 -
 .../hadoop/hbase/ipc/ServerTooBusyException.java | 4 +-
 .../hbase/ipc/StoppedRpcClientException.java | 2 -
 .../hbase/ipc/UnsupportedCellCodecException.java | 2 -
 .../ipc/UnsupportedCompressionCodecException.java | 2 -
 .../hbase/ipc/UnsupportedCryptoException.java | 2 -
 .../hadoop/hbase/ipc/WrongVersionException.java | 2 -
 .../hbase/quotas/QuotaExceededException.java | 2 -
 .../apache/hadoop/hbase/quotas/QuotaFilter.java | 2 -
 .../apache/hadoop/hbase/quotas/QuotaRetriever.java | 4 +-
 .../org/apache/hadoop/hbase/quotas/QuotaScope.java | 2 -
 .../apache/hadoop/hbase/quotas/QuotaSettings.java | 2 -
 .../hadoop/hbase/quotas/QuotaSettingsFactory.java | 2 -
 .../org/apache/hadoop/hbase/quotas/QuotaType.java | 2 -
 .../apache/hadoop/hbase/quotas/ThrottleType.java | 2 -
 .../hadoop/hbase/quotas/ThrottlingException.java | 3 -
 .../hadoop/hbase/regionserver/BloomType.java | 2 -
 .../hadoop/hbase/regionserver/LeaseException.java | 2 -
 .../regionserver/NoSuchColumnFamilyException.java | 2 -
 .../regionserver/RegionServerAbortedException.java | 2 -
 .../regionserver/RegionServerRunningException.java | 2 -
 .../regionserver/RegionServerStoppedException.java | 2 -
 .../hbase/regionserver/WrongRegionException.java | 2 -
 .../regionserver/wal/FailedLogCloseException.java | 2 -
 .../wal/FailedSyncBeforeLogCloseException.java | 2 -
 .../hbase/replication/ReplicationException.java | 2 -
 .../hbase/replication/ReplicationPeerConfig.java | 2 -
 .../replication/ReplicationPeerDescription.java | 4 +-
 .../hbase/security/AccessDeniedException.java | 2 -
 .../hbase/security/access/AccessControlClient.java | 4 +-
 .../security/access/AccessControlConstants.java | 2 -
 .../hadoop/hbase/security/access/Permission.java | 3 -
 .../hbase/security/visibility/Authorizations.java | 2 -
 .../hbase/security/visibility/CellVisibility.java | 2 -
 .../security/visibility/InvalidLabelException.java | 2 -
 .../visibility/LabelAlreadyExistsException.java | 2 -
 .../security/visibility/VisibilityClient.java | 2 -
 .../VisibilityControllerNotReadyException.java | 2 -
 .../hbase/snapshot/CorruptedSnapshotException.java | 2 -
 .../hbase/snapshot/ExportSnapshotException.java | 2 -
 .../hbase/snapshot/HBaseSnapshotException.java | 2 -
 .../hbase/snapshot/RestoreSnapshotException.java | 2 -
 .../hbase/snapshot/SnapshotCreationException.java | 2 -
 .../snapshot/SnapshotDoesNotExistException.java | 2 -
 .../hbase/snapshot/SnapshotExistsException.java | 2 -
 .../snapshot/TablePartiallyOpenException.java | 2 -
 .../hbase/snapshot/UnknownSnapshotException.java | 2 -
 .../hbase/util/FileSystemVersionException.java | 2 -
 .../org/apache/hadoop/hbase/util/JsonMapper.java | 2 -
 .../hbase/TestInterfaceAudienceAnnotations.java | 144 +++++++++++++++------
 .../java/org/apache/hadoop/hbase/AuthUtil.java | 2 -
 .../main/java/org/apache/hadoop/hbase/Cell.java | 2 -
 .../java/org/apache/hadoop/hbase/CellUtil.java | 2 -
 .../java/org/apache/hadoop/hbase/ChoreService.java | 2 -
 .../apache/hadoop/hbase/HBaseConfiguration.java | 2 -
 .../org/apache/hadoop/hbase/HBaseIOException.java | 4 +-
 .../hadoop/hbase/HBaseInterfaceAudience.java | 2 -
 .../java/org/apache/hadoop/hbase/HConstants.java | 2 -
 .../apache/hadoop/hbase/NamespaceDescriptor.java | 3 -
 .../org/apache/hadoop/hbase/ProcedureInfo.java | 4 +-
 .../org/apache/hadoop/hbase/ProcedureState.java | 2 -
 .../org/apache/hadoop/hbase/ScheduledChore.java | 2 -
 .../java/org/apache/hadoop/hbase/ServerName.java | 2 -
 .../java/org/apache/hadoop/hbase/Stoppable.java | 2 -
 .../java/org/apache/hadoop/hbase/TableName.java | 2 -
 .../hadoop/hbase/filter/ByteArrayComparable.java | 2 -
 .../hadoop/hbase/io/ByteBufferOutputStream.java | 2 -
 .../hadoop/hbase/io/ImmutableBytesWritable.java | 3 -
 .../java/org/apache/hadoop/hbase/io/TimeRange.java | 2 -
 .../hadoop/hbase/io/compress/Compression.java | 2 -
 .../org/apache/hadoop/hbase/io/crypto/Cipher.java | 2 -
 .../hadoop/hbase/io/crypto/CipherProvider.java | 2 -
 .../org/apache/hadoop/hbase/io/crypto/Context.java | 2 -
 .../hbase/io/crypto/CryptoCipherProvider.java | 2 -
 .../apache/hadoop/hbase/io/crypto/Decryptor.java | 2 -
 .../hbase/io/crypto/DefaultCipherProvider.java | 2 -
 .../apache/hadoop/hbase/io/crypto/Encryption.java | 3 -
 .../apache/hadoop/hbase/io/crypto/Encryptor.java | 2 -
 .../apache/hadoop/hbase/io/crypto/KeyProvider.java | 2 -
 .../hbase/io/crypto/KeyStoreKeyProvider.java | 2 -
 .../hbase/io/encoding/DataBlockEncoding.java | 2 -
 .../java/org/apache/hadoop/hbase/net/Address.java | 2 -
 .../apache/hadoop/hbase/rsgroup/RSGroupInfo.java | 2 -
 .../org/apache/hadoop/hbase/security/User.java | 2 -
 .../org/apache/hadoop/hbase/types/DataType.java | 2 -
 .../hadoop/hbase/types/FixedLengthWrapper.java | 2 -
 .../org/apache/hadoop/hbase/types/OrderedBlob.java | 2 -
 .../apache/hadoop/hbase/types/OrderedBlobVar.java | 2 -
 .../hadoop/hbase/types/OrderedBytesBase.java | 2 -
 .../apache/hadoop/hbase/types/OrderedFloat32.java | 2 -
 .../apache/hadoop/hbase/types/OrderedFloat64.java | 2 -
 .../apache/hadoop/hbase/types/OrderedInt16.java | 2 -
 .../apache/hadoop/hbase/types/OrderedInt32.java | 2 -
 .../apache/hadoop/hbase/types/OrderedInt64.java | 2 -
 .../org/apache/hadoop/hbase/types/OrderedInt8.java | 2 -
 .../apache/hadoop/hbase/types/OrderedNumeric.java | 2 -
 .../apache/hadoop/hbase/types/OrderedString.java | 2 -
 .../java/org/apache/hadoop/hbase/types/PBType.java | 2 -
 .../org/apache/hadoop/hbase/types/RawByte.java | 2 -
 .../org/apache/hadoop/hbase/types/RawBytes.java | 2 -
 .../hadoop/hbase/types/RawBytesFixedLength.java | 2 -
 .../hadoop/hbase/types/RawBytesTerminated.java | 2 -
 .../org/apache/hadoop/hbase/types/RawDouble.java | 2 -
 .../org/apache/hadoop/hbase/types/RawFloat.java | 2 -
 .../org/apache/hadoop/hbase/types/RawInteger.java | 2 -
 .../org/apache/hadoop/hbase/types/RawLong.java | 2 -
 .../org/apache/hadoop/hbase/types/RawShort.java | 2 -
 .../org/apache/hadoop/hbase/types/RawString.java | 2 -
 .../hadoop/hbase/types/RawStringFixedLength.java | 2 -
 .../hadoop/hbase/types/RawStringTerminated.java | 2 -
 .../java/org/apache/hadoop/hbase/types/Struct.java | 2 -
 .../apache/hadoop/hbase/types/StructBuilder.java | 2 -
 .../apache/hadoop/hbase/types/StructIterator.java | 2 -
 .../hadoop/hbase/types/TerminatedWrapper.java | 2 -
 .../java/org/apache/hadoop/hbase/types/Union2.java | 2 -
 .../java/org/apache/hadoop/hbase/types/Union3.java | 2 -
 .../java/org/apache/hadoop/hbase/types/Union4.java | 2 -
 .../java/org/apache/hadoop/hbase/util/Base64.java | 5 -
 .../apache/hadoop/hbase/util/ByteBufferUtils.java | 2 -
 .../org/apache/hadoop/hbase/util/ByteRange.java | 2 -
 .../apache/hadoop/hbase/util/ByteRangeUtils.java | 2 -
 .../java/org/apache/hadoop/hbase/util/Bytes.java | 4 -
 .../java/org/apache/hadoop/hbase/util/Counter.java | 2 -
 .../java/org/apache/hadoop/hbase/util/MD5Hash.java | 2 -
 .../java/org/apache/hadoop/hbase/util/Order.java | 2 -
 .../org/apache/hadoop/hbase/util/OrderedBytes.java | 2 -
 .../java/org/apache/hadoop/hbase/util/Pair.java | 2 -
 .../apache/hadoop/hbase/util/PairOfSameType.java | 2 -
 .../hadoop/hbase/util/PositionedByteRange.java | 2 -
 .../hbase/util/ReadOnlyByteRangeException.java | 2 -
 .../apache/hadoop/hbase/util/SimpleByteRange.java | 2 -
 .../hadoop/hbase/util/SimpleMutableByteRange.java | 2 -
 .../hbase/util/SimplePositionedByteRange.java | 2 -
 .../util/SimplePositionedMutableByteRange.java | 2 -
 .../org/apache/hadoop/hbase/util/VersionInfo.java | 2 -
 .../hadoop/hbase/HBaseCommonTestingUtility.java | 2 -
 .../client/coprocessor/AggregationClient.java | 2 +-
 .../client/coprocessor/AsyncAggregationClient.java | 2 -
 .../org/apache/hadoop/hbase/rest/Constants.java | 2 -
 .../apache/hadoop/hbase/rest/client/Client.java | 2 -
 .../apache/hadoop/hbase/rest/client/Cluster.java | 2 -
 .../hadoop/hbase/rest/client/RemoteAdmin.java | 2 -
 .../hadoop/hbase/rest/client/RemoteHTable.java | 2 -
 .../apache/hadoop/hbase/rest/client/Response.java | 2 -
 .../rest/filter/RestCsrfPreventionFilter.java | 1 -
 .../org/apache/hadoop/hbase/LocalHBaseCluster.java | 2 -
 .../hadoop/hbase/client/TableSnapshotScanner.java | 2 -
 .../hadoop/hbase/client/locking/EntityLock.java | 2 -
 .../hbase/errorhandling/ForeignException.java | 2 -
 .../hbase/errorhandling/TimeoutException.java | 2 -
 .../hadoop/hbase/mapred/GroupingTableMap.java | 2 -
 .../hadoop/hbase/mapred/HRegionPartitioner.java | 4 +-
 .../hadoop/hbase/mapred/IdentityTableMap.java | 2 -
 .../hadoop/hbase/mapred/IdentityTableReduce.java | 2 -
 .../mapred/MultiTableSnapshotInputFormat.java | 2 -
 .../org/apache/hadoop/hbase/mapred/RowCounter.java | 2 -
 .../hadoop/hbase/mapred/TableInputFormat.java | 2 -
 .../hadoop/hbase/mapred/TableInputFormatBase.java | 2 -
 .../org/apache/hadoop/hbase/mapred/TableMap.java | 2 -
 .../hadoop/hbase/mapred/TableMapReduceUtil.java | 2 -
 .../hadoop/hbase/mapred/TableOutputFormat.java | 2 -
 .../hadoop/hbase/mapred/TableRecordReader.java | 2 -
 .../hadoop/hbase/mapred/TableRecordReaderImpl.java | 2 -
 .../apache/hadoop/hbase/mapred/TableReduce.java | 2 -
 .../hbase/mapred/TableSnapshotInputFormat.java | 2 -
 .../org/apache/hadoop/hbase/mapred/TableSplit.java | 2 -
 .../apache/hadoop/hbase/mapreduce/CellCounter.java | 2 -
 .../apache/hadoop/hbase/mapreduce/CellCreator.java | 2 -
 .../apache/hadoop/hbase/mapreduce/CopyTable.java | 2 -
 .../org/apache/hadoop/hbase/mapreduce/Export.java | 2 -
 .../hbase/mapreduce/GroupingTableMapper.java | 2 -
 .../hadoop/hbase/mapreduce/HFileOutputFormat2.java | 2 -
 .../hadoop/hbase/mapreduce/HRegionPartitioner.java | 2 -
 .../hbase/mapreduce/IdentityTableMapper.java | 2 -
 .../hbase/mapreduce/IdentityTableReducer.java | 2 -
 .../org/apache/hadoop/hbase/mapreduce/Import.java | 2 -
 .../apache/hadoop/hbase/mapreduce/ImportTsv.java | 2 -
 .../hbase/mapreduce/KeyValueSerialization.java | 2 -
 .../hbase/mapreduce/KeyValueSortReducer.java | 2 -
 .../hbase/mapreduce/LoadIncrementalHFiles.java | 2 -
 .../hbase/mapreduce/MultiHFileOutputFormat.java | 2 -
 .../hbase/mapreduce/MultiTableInputFormat.java | 2 -
 .../hbase/mapreduce/MultiTableInputFormatBase.java | 2 -
 .../hbase/mapreduce/MultiTableOutputFormat.java | 2 -
 .../mapreduce/MultiTableSnapshotInputFormat.java | 2 -
 .../hbase/mapreduce/MutationSerialization.java | 2 -
 .../apache/hadoop/hbase/mapreduce/PutCombiner.java | 2 -
 .../hadoop/hbase/mapreduce/PutSortReducer.java | 2 -
 .../hbase/mapreduce/ResultSerialization.java | 2 -
 .../apache/hadoop/hbase/mapreduce/RowCounter.java | 2 -
 .../mapreduce/SimpleTotalOrderPartitioner.java | 2 -
 .../hadoop/hbase/mapreduce/TableInputFormat.java | 2 -
 .../hbase/mapreduce/TableInputFormatBase.java | 2 -
 .../hadoop/hbase/mapreduce/TableMapReduceUtil.java | 2 -
 .../apache/hadoop/hbase/mapreduce/TableMapper.java | 4 +-
 .../hbase/mapreduce/TableOutputCommitter.java | 2 -
 .../hadoop/hbase/mapreduce/TableOutputFormat.java | 2 -
 .../hadoop/hbase/mapreduce/TableRecordReader.java | 2 -
 .../hbase/mapreduce/TableRecordReaderImpl.java | 2 -
 .../hadoop/hbase/mapreduce/TableReducer.java | 2 -
 .../hbase/mapreduce/TableSnapshotInputFormat.java | 2 -
 .../apache/hadoop/hbase/mapreduce/TableSplit.java | 2 -
 .../hadoop/hbase/mapreduce/TextSortReducer.java | 2 -
 .../hadoop/hbase/mapreduce/TsvImporterMapper.java | 2 -
 .../hbase/mapreduce/TsvImporterTextMapper.java | 2 -
 .../mapreduce/VisibilityExpressionResolver.java | 2 -
 .../apache/hadoop/hbase/mapreduce/WALPlayer.java | 2 -
 .../org/apache/hadoop/hbase/mob/MobConstants.java | 2 -
 .../hbase/regionserver/RowTooBigException.java | 2 +-
 .../hadoop/hbase/security/token/TokenUtil.java | 2 -
 .../security/visibility/ScanLabelGenerator.java | 2 -
 .../visibility/VisibilityExpEvaluator.java | 2 -
 .../visibility/VisibilityLabelService.java | 2 -
 .../hadoop/hbase/snapshot/ExportSnapshot.java | 2 -
 .../apache/hadoop/hbase/snapshot/SnapshotInfo.java | 2 -
 .../hadoop/hbase/util/ConfigurationUtil.java | 2 -
 .../apache/hadoop/hbase/util/EncryptionTest.java | 2 -
 .../hbase/util/LeaseNotRecoveredException.java | 2 -
 .../hbase/zookeeper/MiniZooKeeperCluster.java | 2 -
 .../apache/hadoop/hbase/HBaseTestingUtility.java | 2 -
 .../org/apache/hadoop/hbase/MiniHBaseCluster.java | 2 -
 .../hadoop/hbase/codec/CodecPerformance.java | 2 -
 383 files changed, 140 insertions(+), 852 deletions(-)

diff --git
a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java index 506ef56..8a34a64 100644 --- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java +++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceAudience.java @@ -43,7 +43,6 @@ import java.lang.annotation.RetentionPolicy; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class InterfaceAudience { /** * Intended for use by any project or application. diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java index ac20f3a..fbe71d1 100644 --- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java +++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/InterfaceStability.java @@ -39,7 +39,6 @@ import java.lang.annotation.RetentionPolicy; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class InterfaceStability { /** * Can evolve while retaining compatibility for minor release boundaries.; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java index ed14153..c2d3a7b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallDroppedException.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Returned to the clients when their request was discarded due to server being overloaded. 
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class CallDroppedException extends IOException { public CallDroppedException() { super(); @@ -40,4 +38,4 @@ public class CallDroppedException extends IOException { public CallDroppedException(String message) { super(message); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java index 9f8b386..d615d0e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/CallQueueTooBigException.java @@ -21,11 +21,9 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class CallQueueTooBigException extends IOException { public CallQueueTooBigException() { super(); @@ -36,4 +34,4 @@ public class CallQueueTooBigException extends IOException { public CallQueueTooBigException(String message) { super(message); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java index b27ae82..9e3f556 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * This exception is thrown by the master when a region server clock skew is @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Stable public class ClockOutOfSyncException extends IOException { public ClockOutOfSyncException(String message) { super(message); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java index aed3af4..c51a437 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java @@ -26,7 +26,6 @@ import java.util.Set; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.io.VersionedWritable; @@ -50,7 +49,6 @@ import org.apache.hadoop.io.VersionedWritable; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ClusterStatus extends VersionedWritable { /** * Version for object serialization. 
Incremented for changes in serialized diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java index 8be2518..225ec4e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Subclass if exception is not meant to be retried: e.g. * {@link org.apache.hadoop.hbase.UnknownScannerException} */ @InterfaceAudience.Public -@InterfaceStability.Stable public class DoNotRetryIOException extends HBaseIOException { // TODO: This would be more useful as a marker interface than as a class. private static final long serialVersionUID = 1197446454511704139L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java index 1000d7d..2dbc93e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * persisted into store files. Response should include replay of wal content. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class DroppedSnapshotException extends IOException { private static final long serialVersionUID = -5463156580831677374L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 545ea61..46e97c3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -27,7 +27,6 @@ import java.util.Map; import java.util.Set; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.MobCompactPartitionPolicy; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.exceptions.HBaseException; @@ -49,7 +48,6 @@ import com.google.common.base.Preconditions; * It is used as input when creating a table or adding a column. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HColumnDescriptor implements Comparable { // For future backward compatibility diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java index b98d210..bc93cc6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java @@ -28,7 +28,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.KeyValue.KVComparator; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -76,7 +75,6 @@ import org.apache.hadoop.util.StringUtils; * previous behavior of a range corresponding to 1 region. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HRegionInfo implements Comparable { private static final Log LOG = LogFactory.getLog(HRegionInfo.class); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java index edb53dc..6cf22dd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionLocation.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Addressing; /** @@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.util.Addressing; *
This interface has been marked InterfaceAudience.Public in 0.96 and 0.98. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HRegionLocation implements Comparable { private final HRegionInfo regionInfo; private final ServerName serverName; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index 25fd896..ed0659c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -36,7 +36,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes; * when the region split should occur, coprocessors associated with it etc... */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HTableDescriptor implements Comparable { private static final Log LOG = LogFactory.getLog(HTableDescriptor.class); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java index 5d9c2ed..8fb05d5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if a request is table schema modification is requested but * made for an invalid family name. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class InvalidFamilyOperationException extends DoNotRetryIOException { private static final long serialVersionUID = 1L << 22 - 1L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java index d2d92b3..28ed13e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/KeepDeletedCells.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Ways to keep cells marked for delete around. @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * this way for backwards compatibility. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum KeepDeletedCells { /** Deleted Cells are not retained. 
*/ FALSE, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java index ddd03e8..70afee2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java @@ -21,13 +21,11 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if the master is not running */ @InterfaceAudience.Public -@InterfaceStability.Stable public class MasterNotRunningException extends IOException { private static final long serialVersionUID = 1L << 23 - 1L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java index 0153f7d..8dc92ae 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MemoryCompactionPolicy.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Enum describing all possible memory compaction policies */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum MemoryCompactionPolicy { /** * No memory compaction, when size threshold is exceeded data is flushed to disk diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java index fdff554..90cd2c3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MultiActionResultTooLarge.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception thrown when the result needs to be chunked on the server side. @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * retries because some of the multi was a success. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MultiActionResultTooLarge extends RetryImmediatelyException { public MultiActionResultTooLarge(String s) { super(s); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java index a7ebf0d..10a1ed6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceExistException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a namespace exists but should not */ @InterfaceAudience.Public -@InterfaceStability.Stable public class NamespaceExistException extends DoNotRetryIOException { private static final long serialVersionUID = -1582357514338825412L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java index 092290d..f61cdea 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NamespaceNotFoundException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a namespace can not be located */ @InterfaceAudience.Public -@InterfaceStability.Stable public class NamespaceNotFoundException extends DoNotRetryIOException { private static final long serialVersionUID = -6673607347330260324L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java index 3cb0f5b..ce6acf7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when an operation requires the root and all meta regions to be online */ @InterfaceAudience.Public -@InterfaceStability.Stable public class NotAllMetaRegionsOnlineException extends DoNotRetryIOException { private static final long serialVersionUID = 6439786157874827523L; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java index 8975c74..bca9cd2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.Bytes; * serving. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class NotServingRegionException extends IOException { private static final long serialVersionUID = 1L << 17 - 1L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java index a5ae44b..dc6fd4b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * This exception is thrown by the master when a region server was shut down and @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Stable public class PleaseHoldException extends HBaseIOException { public PleaseHoldException(String message) { super(message); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java index 24ea16c..e0c0346 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when something happens related to region handling. * Subclasses have to be more specific. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RegionException extends HBaseIOException { private static final long serialVersionUID = 1473510258071111371L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java index befb2de..d6c028d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLoad.java @@ -23,7 +23,6 @@ package org.apache.hadoop.hbase; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId; import org.apache.hadoop.hbase.util.Bytes; @@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.util.Strings; * Encapsulates per-region load metrics. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionLoad { protected ClusterStatusProtos.RegionLoad regionLoadPB; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java index fd5fc26..5d4cad4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown by a region server if it will block and wait to serve a request. @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * region is compacting. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionTooBusyException extends IOException { private static final long serialVersionUID = 1728345723728342L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java index daf7dd5..8ad93f2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ReplicationPeerNotFoundException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a replication peer can not be found */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ReplicationPeerNotFoundException extends DoNotRetryIOException { private static final long serialVersionUID = 1L; @@ -33,4 +31,4 @@ public class ReplicationPeerNotFoundException extends DoNotRetryIOException { public ReplicationPeerNotFoundException(String peerId) { super(peerId); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java index e0b90fd..b97c168 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RetryImmediatelyException.java @@ -20,10 +20,8 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class RetryImmediatelyException extends IOException { public RetryImmediatelyException(String s) { super(s); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java index e884e51..8547dfb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerLoad.java @@ -27,7 +27,6 @@ import java.util.TreeMap; import java.util.TreeSet; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor; @@ -40,7 +39,6 @@ import org.apache.hadoop.hbase.util.Strings; * This class is used for exporting current state of load on a RegionServer. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ServerLoad { private int stores = 0; private int storefiles = 0; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java index 623e8cf..6c0f3bb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a table exists but should not */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableExistsException extends DoNotRetryIOException { private static final long serialVersionUID = 1L << 7 - 1L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java index fa1f970..391fd43 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * @@ -26,7 +25,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving @SuppressWarnings("serial") public class TableInfoMissingException extends HBaseIOException { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java index 9b5f728..dcfa857 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** * Thrown if a table should be offline but is not */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableNotDisabledException extends DoNotRetryIOException { private static final long serialVersionUID = 1L << 19 - 1L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java index 0f78ee6..5de406e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; @@ -27,7 +26,6 @@ 
import org.apache.hadoop.hbase.util.Bytes; * Thrown if a table should be enabled but is not */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableNotEnabledException extends DoNotRetryIOException { private static final long serialVersionUID = 262144L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java index 8ac5e20..6344cd3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java @@ -19,12 +19,10 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** Thrown when a table can not be located */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableNotFoundException extends DoNotRetryIOException { private static final long serialVersionUID = 993179627856392526L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java index 2ebba32..e9684ae 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.DoNotRetryRegionException; /** * Thrown when we are asked to operate on a region we know nothing about. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class UnknownRegionException extends DoNotRetryRegionException { private static final long serialVersionUID = 1968858760475205392L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java index 3e7b22d..8f7d441 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * down and has cancelled all leases. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class UnknownScannerException extends DoNotRetryIOException { private static final long serialVersionUID = 993179627856392526L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java index 422a659..c492a27 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java @@ -21,13 +21,11 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if the client can't connect to zookeeper */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ZooKeeperConnectionException extends IOException { private static final long serialVersionUID = 1L << 23 - 1L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java index cc14acd..f2fc9a5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java @@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.replication.TableCFs; import org.apache.hadoop.hbase.client.security.SecurityCapability; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; @@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.util.Pair; * @since 0.99.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface Admin extends Abortable, Closeable { int getOperationTimeout(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java index 15497ce..a655c7d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.util.Bytes; @@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.util.Bytes; * {@link #add(byte[], byte[], byte[])} method. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class Append extends Mutation { /** * @param returnResults diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java index 9945c40..dd340b6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncAdmin.java @@ -27,14 +27,12 @@ import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Pair; /** * The asynchronous administrative API for HBase. */ -@InterfaceAudience.Public -@InterfaceStability.Unstable +@InterfaceAudience.Private public interface AsyncAdmin { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java index dbe32ca..65005fa 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncConnection.java @@ -23,13 +23,11 @@ import java.util.concurrent.ExecutorService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * The asynchronous version of Connection. */ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface AsyncConnection extends Closeable { /** @@ -105,4 +103,4 @@ public interface AsyncConnection extends Closeable { * @return an AsyncAdmin instance for cluster administration */ AsyncAdmin getAdmin(); -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java index 402ad64..a2e193c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * The asynchronous table for normal users. @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * to provide a {@code ExecutorService}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface AsyncTable extends AsyncTableBase { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java index b5a251b..73ebebb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBase.java @@ -30,7 +30,6 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.util.Bytes; @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.util.Bytes; * from the returned {@link CompletableFuture}. */ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface AsyncTableBase { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java index 2330855..1cc5f10 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableBuilder.java @@ -22,7 +22,6 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.retries2Attempts; import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * For creating {@link AsyncTable} or {@link RawAsyncTable}. @@ -32,7 +31,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * AsyncTable/RawAsyncTable instance. */ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface AsyncTableBuilder { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java index 989e8d9..13434a6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTableRegionLocator.java @@ -22,7 +22,6 @@ import java.util.concurrent.CompletableFuture; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * The asynchronous version of RegionLocator. @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * from the returned {@link CompletableFuture}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface AsyncTableRegionLocator { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java index 78d3398..fd36e76 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Attributes.java @@ -22,10 +22,8 @@ package org.apache.hadoop.hbase.client; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Stable public interface Attributes { /** * Sets an attribute. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java index cea9304..766c28c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutator.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import java.io.Closeable; import java.io.IOException; @@ -61,7 +60,6 @@ import java.util.List; * @since 1.0.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface BufferedMutator extends Closeable { /** * Key to use setting non-default BufferedMutator implementation in Configuration. @@ -138,7 +136,6 @@ public interface BufferedMutator extends Closeable { * Listens for asynchronous exceptions on a {@link BufferedMutator}. */ @InterfaceAudience.Public - @InterfaceStability.Evolving interface ExceptionListener { public void onException(RetriesExhaustedWithDetailsException exception, BufferedMutator mutator) throws RetriesExhaustedWithDetailsException; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java index 9c901e2..060fc77 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/BufferedMutatorParams.java @@ -23,13 +23,11 @@ import java.util.concurrent.ExecutorService; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Parameters for instantiating a {@link BufferedMutator}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class BufferedMutatorParams implements Cloneable { static final int UNSET = -1; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java index 17fec2b..9432378 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Currently, there are only two compact types: @@ -25,11 +24,10 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * {@code MOB} means do mob files compaction. * */ @InterfaceAudience.Public -@InterfaceStability.Unstable public enum CompactType { NORMAL (0), MOB (1); CompactType(int value) {} -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java index b4824ef..8f2a83b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactionState.java @@ -17,13 +17,11 @@ */ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * POJO representing the compaction state */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum CompactionState { NONE, MINOR, MAJOR, MAJOR_AND_MINOR; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java index a8cd296..8eedb79 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Connection.java @@ -26,7 +26,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A cluster connection encapsulating lower level individual connections to actual servers and @@ -50,7 +49,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * @since 0.99.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface Connection extends Abortable, Closeable { /* diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java index 64f337a..156a3c8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionFactory.java @@ -26,7 +26,6 @@ import java.util.concurrent.ExecutorService; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import 
org.apache.hadoop.hbase.util.ReflectionUtils; @@ -54,7 +53,6 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; * @since 0.99.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ConnectionFactory { public static final String HBASE_CLIENT_ASYNC_CONNECTION_IMPL = "hbase.client.async.connection.impl"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java index 39323d6..b6e0531 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Consistency.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Consistency defines the expected consistency level for an operation. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum Consistency { // developer note: Do not reorder. Client.proto#Consistency depends on this order /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java index 0eb1d2b..278ea58 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.visibility.CellVisibility; import org.apache.hadoop.hbase.util.Bytes; @@ -69,7 +68,6 @@ import org.apache.hadoop.hbase.util.Bytes; * timestamp. The constructor timestamp is not referenced. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Delete extends Mutation implements Comparable { /** * Create a Delete operation for the specified row. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java index 3c5dd28..e0b5ead 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Similar to RegionException, but disables retries. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class DoNotRetryRegionException extends DoNotRetryIOException { private static final long serialVersionUID = 6907047686199321701L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java index 9b35e04..a3fa1d2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Durability.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Enum describing the durability guarantees for tables and {@link Mutation}s * Note that the items must be sorted in order of increasing durability */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum Durability { /* Developer note: Do not rename the enum field names. They are serialized in HTableDescriptor */ /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java index 3771aff..c3ddc4b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java @@ -34,7 +34,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.security.access.Permission; @@ -65,7 +64,6 @@ import org.apache.hadoop.hbase.util.Bytes; * To add a filter, call {@link #setFilter(Filter) setFilter}. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Get extends Query implements Row, Comparable { private static final Log LOG = LogFactory.getLog(Get.class); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java index f3a58ad..313125c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java @@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; @@ -67,7 +66,6 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; * This class is thread safe. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HTableMultiplexer { private static final Log LOG = LogFactory.getLog(HTableMultiplexer.class.getName()); @@ -271,7 +269,6 @@ public class HTableMultiplexer { * in total or on per region server basis. 
*/ @InterfaceAudience.Public - @InterfaceStability.Evolving public static class HTableMultiplexerStatus { private long totalFailedPutCounter; private long totalBufferedPutCounter; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java index eb1cbc5..179a566 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.visibility.CellVisibility; @@ -49,7 +48,6 @@ import org.apache.hadoop.hbase.util.ClassSize; * {@link #addColumn(byte[], byte[], long)} method. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Increment extends Mutation implements Comparable { private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE; private TimeRange tr = new TimeRange(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java index 01aba6f..ad0897e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Specify Isolation levels in Scan operations. @@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * not have been committed yet. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public enum IsolationLevel { READ_COMMITTED(1), diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java index 7e31b25..5fa9ec2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterSwitchType.java @@ -17,13 +17,11 @@ */ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Represents the master switch type */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum MasterSwitchType { SPLIT, MERGE -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java index f550572..076ab6f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MobCompactPartitionPolicy.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Enum describing the mob compact partition policy types. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum MobCompactPartitionPolicy { /** * Compact daily mob files into one file diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java index fb55fdd..b010c2f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java @@ -38,7 +38,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; @@ -58,7 +57,6 @@ import com.google.common.io.ByteArrayDataOutput; import com.google.common.io.ByteStreams; @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Mutation extends OperationWithAttributes implements Row, CellScannable, HeapSize { public static final long MUTATION_OVERHEAD = ClassSize.align( diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java index 126b117..e628911 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when no region server can be found for a region */ @InterfaceAudience.Public -@InterfaceStability.Stable public class NoServerForRegionException extends 
DoNotRetryRegionException { private static final long serialVersionUID = 1L << 11 - 1L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java index 4f25e2c..130cf16 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Operation.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.JsonMapper; /** @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.JsonMapper; * Contains methods for exposure to logging and debugging tools. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Operation { // TODO make this configurable // TODO Do we need this anymore now we have protobuffed it all? diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java index cc863b9..ba21cbb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java @@ -24,12 +24,10 @@ import java.util.HashMap; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class OperationWithAttributes extends Operation implements Attributes { // An opaque blob of attributes private Map attributes; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java index 701dceb..5c3ac4b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java @@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.IndividualBytesFieldCell; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.security.access.Permission; import org.apache.hadoop.hbase.security.visibility.CellVisibility; @@ -51,7 +50,6 @@ import org.apache.hadoop.hbase.util.Bytes; * setting the timestamp. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Put extends Mutation implements HeapSize, Comparable { /** * Create a Put operation for the specified row. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java index 1322ef5..7f50d13 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java @@ -22,7 +22,6 @@ import java.util.Map; import com.google.common.collect.Maps; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.TimeRange; @@ -38,7 +37,6 @@ import com.google.common.collect.ListMultimap; import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Query extends OperationWithAttributes { private static final String ISOLATION_LEVEL = "_isolationlevel_"; protected Filter filter = null; @@ -275,4 +273,4 @@ public abstract class Query extends OperationWithAttributes { public Map getColumnFamilyTimeRange() { return this.colFamTimeRangeMap; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java index e493123..4a916d3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTable.java @@ -26,7 +26,6 @@ import java.util.function.Function; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A low level asynchronous table. @@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * most features of AsyncTable, we can think about merge these two interfaces. */ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface RawAsyncTable extends AsyncTableBase { /** @@ -93,7 +91,6 @@ public interface RawAsyncTable extends AsyncTableBase { * */ @InterfaceAudience.Public - @InterfaceStability.Unstable @FunctionalInterface interface CoprocessorCallable { @@ -175,7 +172,6 @@ public interface RawAsyncTable extends AsyncTableBase { * */ @InterfaceAudience.Public - @InterfaceStability.Unstable interface CoprocessorCallback { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java index 899c0bb..820960b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawScanResultConsumer.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; @@ -32,14 +31,12 @@ import org.apache.hadoop.hbase.client.metrics.ScanMetrics; * consuming tasks in all methods below unless you know what you are doing. */ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface RawScanResultConsumer { /** * Used to resume a scan. 
*/ @InterfaceAudience.Public - @InterfaceStability.Unstable interface ScanResumer { /** @@ -60,7 +57,6 @@ public interface RawScanResultConsumer { * thrown if you have already called one of the methods. */ @InterfaceAudience.Public - @InterfaceStability.Unstable interface ScanController { /** @@ -122,4 +118,4 @@ public interface RawScanResultConsumer { */ default void onScanMetricsCreated(ScanMetrics scanMetrics) { } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java index bfdb216..5f9cd6d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLoadStats.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * POJO representing region server load */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionLoadStats { int memstoreLoad; int heapOccupancy; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java index 39518a6..2c96a4a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionLocator.java @@ -25,7 +25,6 @@ import java.util.List; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Pair; /** @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Pair; * @since 0.99.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface RegionLocator extends Closeable { /** * Finds the region on which the given row is being served. Does not reload the cache. 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java index d6cceb9..018bc69 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java @@ -20,11 +20,9 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.RegionException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** Thrown when a table can not be located */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionOfflineException extends RegionException { private static final long serialVersionUID = 466008402L; /** default constructor */ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java index 46e730e..33fed2c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestController.java @@ -25,17 +25,14 @@ import java.util.function.Consumer; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * An interface for client request scheduling algorithm. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface RequestController { @InterfaceAudience.Public - @InterfaceStability.Evolving public enum ReturnCode { /** * Accept current row. @@ -55,7 +52,6 @@ public interface RequestController { * Picks up the valid data. */ @InterfaceAudience.Public - @InterfaceStability.Evolving public interface Checker { /** * Checks the data whether it is valid to submit. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java index 7ed80f0..ba4babd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RequestControllerFactory.java @@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.ReflectionUtils; /** * A factory class that constructs an {@link org.apache.hadoop.hbase.client.RequestController}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class RequestControllerFactory { public static final String REQUEST_CONTROLLER_IMPL_CONF_KEY = "hbase.client.request.controller.impl"; /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java index f8682ec..63aab80 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java @@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -79,7 +78,6 @@ import org.apache.hadoop.hbase.util.Bytes; * in then use {@link #copyFrom(Result)} */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Result implements CellScannable, CellScanner { private Cell[] cells; private Boolean exists; // if the query was just to check existence. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java index 8951e84..ef8d887 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java @@ -26,14 +26,12 @@ import java.util.Iterator; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; /** * Interface for client-side scanning. Go to {@link Table} to obtain instances. */ @InterfaceAudience.Public -@InterfaceStability.Stable public interface ResultScanner extends Closeable, Iterable { @Override diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java index dc25f64..eec9f62 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java @@ -23,14 +23,12 @@ import java.util.Date; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception thrown by HTable methods when an attempt to do something (like * commit changes) fails after a bunch of retries. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class RetriesExhaustedException extends IOException { private static final long serialVersionUID = 1876775844L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java index 8b09222..70d5548 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java @@ -30,7 +30,6 @@ import java.util.Set; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.util.Bytes; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Stable public class RetriesExhaustedWithDetailsException extends RetriesExhaustedException { List exceptions; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java index cea45fc..79f27bc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Row.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Has a row. */ @InterfaceAudience.Public -@InterfaceStability.Stable public interface Row extends Comparable { /** * @return The row. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java index 85fd590..758bce6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.client; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Provide a way to access the inner buffer. @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * @param */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface RowAccess extends Iterable { /** * @return true if there are no elements. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java index 8a8193e..a9384ac 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Bytes; * into Sets or using them as keys in Maps. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RowMutations implements Row { private final List mutations; private final byte [] row; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java index 69b57b0..e32127c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowTooBigException.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Gets or Scans throw this exception if running without in-row scan flag @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * hbase.table.max.rowsize). */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RowTooBigException extends DoNotRetryRegionException { public RowTooBigException(String message) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java index 2b2e4c8..67d1b4d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java @@ -18,12 +18,10 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import java.io.IOException; @InterfaceAudience.Public -@InterfaceStability.Evolving public interface RpcRetryingCaller { void cancel(); @@ -49,4 +47,4 @@ public interface RpcRetryingCaller { */ T callWithoutRetries(RetryingCallable callable, int callTimeout) throws IOException, RuntimeException; -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java index 0047d2f..7bc78d4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -33,7 +33,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.IncompatibleFilterException; @@ -87,7 +86,6 @@ import org.apache.hadoop.hbase.util.Bytes; * instance per usage. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class Scan extends Query { private static final Log LOG = LogFactory.getLog(Scan.class); @@ -1131,7 +1129,6 @@ public class Scan extends Query { } @InterfaceAudience.Public - @InterfaceStability.Unstable public enum ReadType { DEFAULT, STREAM, PREAD } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java index 03b1ba0..5a4170f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java @@ -18,14 +18,12 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; /** * Receives {@link Result} for an asynchronous scan. */ @InterfaceAudience.Public -@InterfaceStability.Unstable public interface ScanResultConsumer { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java index d70c76f..72b2a15 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController; import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest; @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Updat * networking, etc..) 
when talking to a local master */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ShortCircuitMasterConnection implements MasterKeepAliveConnection { private final MasterService.BlockingInterface stub; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java index b9b6b6c..9a1e1cb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * The POJO equivalent of HBaseProtos.SnapshotDescription */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class SnapshotDescription { private final String name; private final TableName table; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java index e3e12bd..c9820ca 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotType.java @@ -17,13 +17,11 @@ */ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * POJO representing the snapshot type */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum SnapshotType { DISABLED, FLUSH, SKIPFLUSH; -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java index fa4e5f1..b74823c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SyncCoprocessorRpcChannel.java @@ -28,7 +28,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; @@ -39,7 +38,6 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils; * {@link org.apache.hadoop.hbase.client.Table#coprocessorService(byte[])}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel { private static final Log LOG = LogFactory.getLog(SyncCoprocessorRpcChannel.class); @@ -77,4 +75,4 @@ abstract class SyncCoprocessorRpcChannel implements CoprocessorRpcChannel { protected abstract Message callExecService(RpcController controller, Descriptors.MethodDescriptor method, Message request, Message responsePrototype) throws IOException; -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java index 90fee8d..933329f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java @@ -27,7 +27,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; @@ -49,7 +48,6 @@ import com.google.protobuf.ServiceException; * @since 0.99.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface Table extends Closeable { /** * Gets the fully qualified table name instance of this table. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java index 27e1596..3eedb10 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableBuilder.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * For creating {@link Table} instance. @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * Table instance. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface TableBuilder { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java index 33aef79..742acee 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java @@ -21,10 +21,8 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving class UnmodifyableHRegionInfo extends HRegionInfo { /* * Creates an unmodifyable copy of an HRegionInfo diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java index 59a1bd5..b5f5ae9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java @@ -22,13 +22,11 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Read-only table descriptor. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class UnmodifyableHTableDescriptor extends HTableDescriptor { /** * Default constructor. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java index e0609da..69729f5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/WrongRowIOException.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.client; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class WrongRowIOException extends HBaseIOException { private static final long serialVersionUID = -5849522209440123059L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java index 94e434f..aab0368 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicy.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.client.backoff; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Configurable policy for the amount of time a client should wait for a new request to the @@ -28,8 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * Must have a single-argument constructor that takes a {@link org.apache.hadoop.conf.Configuration} *

*/ -@InterfaceAudience.Public -@InterfaceStability.Unstable +@InterfaceAudience.Private public interface ClientBackoffPolicy { public static final String BACKOFF_POLICY_CLASS = @@ -39,4 +37,4 @@ public interface ClientBackoffPolicy { * @return the number of ms to wait on the client based on the */ public long getBackoffTime(ServerName serverName, byte[] region, ServerStatistics stats); -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java index b41133a..104b1a4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java @@ -23,7 +23,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import com.google.common.base.Preconditions; @@ -31,8 +30,7 @@ import com.google.common.base.Preconditions; * Simple exponential backoff policy on for the client that uses a percent^4 times the * max backoff to generate the backoff time. */ -@InterfaceAudience.Public -@InterfaceStability.Unstable +@InterfaceAudience.Private public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy { private static final Log LOG = LogFactory.getLog(ExponentialClientBackoffPolicy.class); @@ -104,4 +102,4 @@ public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy { "Value %s must be within the range [%s,%s]", valueIn, baseMin, baseMax); return ((limitMax - limitMin) * (valueIn - baseMin) / (baseMax - baseMin)) + limitMin; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java index a9c23cc..0b783d7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java @@ -22,14 +22,12 @@ package org.apache.hadoop.hbase.client.coprocessor; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A collection of interfaces and utilities used for interacting with custom RPC * interfaces exposed by Coprocessors. */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class Batch { /** * Defines a unit of work to be executed. 
@@ -50,7 +48,6 @@ public abstract class Batch { * @param the return type from {@link Batch.Call#call(Object)} */ @InterfaceAudience.Public - @InterfaceStability.Stable public interface Call { R call(T instance) throws IOException; } @@ -72,7 +69,6 @@ public abstract class Batch { * org.apache.hadoop.hbase.client.coprocessor.Batch.Call) */ @InterfaceAudience.Public - @InterfaceStability.Stable public interface Callback { void update(byte[] region, byte[] row, R result); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java index 73b3892..7813527 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client.metrics; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** @@ -37,7 +36,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * for now. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ScanMetrics extends ServerSideScanMetrics { // AtomicLongs to hold the metrics values. These are all updated through ClientScanner and diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java index b14938b..8a96aeb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/metrics/ServerSideScanMetrics.java @@ -22,7 +22,6 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import com.google.common.collect.ImmutableMap; @@ -30,7 +29,6 @@ import com.google.common.collect.ImmutableMap; * Provides server side metrics related to scan operations. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ServerSideScanMetrics { /** * Hash to hold the String -> Atomic Long mappings for each metric diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java index 0eae10b..94425f9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java @@ -41,7 +41,6 @@ import org.apache.hadoop.hbase.ReplicationPeerNotFoundException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -86,7 +85,6 @@ import com.google.common.collect.Lists; * @deprecated use {@link org.apache.hadoop.hbase.client.Admin} instead. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving @Deprecated public class ReplicationAdmin implements Closeable { private static final Log LOG = LogFactory.getLog(ReplicationAdmin.class); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java index f293586..854517e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java @@ -23,14 +23,12 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Used by {@link org.apache.hadoop.hbase.client.Admin#listReplicatedTableCFs()}. * The cfs is a map of . */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TableCFs { private final TableName table; private final Map cfs; @@ -58,4 +56,4 @@ public class TableCFs { } return sb.toString(); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java index 1847b2e..ab70616 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.client.security; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Available security capabilities */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum SecurityCapability { // Note to implementors: These must match the numbering of Capability values in MasterProtos SIMPLE_AUTHENTICATION(0), diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java index 3b01a9e..8c0d054 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/BypassCoprocessorException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.coprocessor; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if a coprocessor rules we should bypass an operation */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class BypassCoprocessorException extends CoprocessorException { private static final long serialVersionUID = 5943889011582357043L; @@ -41,4 +39,4 @@ public class BypassCoprocessorException extends CoprocessorException { public BypassCoprocessorException(String s) { super(s); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java index 9946d97..541392c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.coprocessor; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if a coprocessor encounters any exception. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class CoprocessorException extends DoNotRetryIOException { private static final long serialVersionUID = 4357922136679804887L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java index 43a4ee4..74621ab 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ConnectionClosingException.java @@ -39,7 +39,6 @@ package org.apache.hadoop.hbase.exceptions; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when the client believes that we are trying to communicate to has @@ -49,7 +48,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * retries and fast fail the operation. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ConnectionClosingException extends IOException { public ConnectionClosingException(String string) { super(string); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java index 5bfd2f3..050b2c1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.exceptions; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception thrown if a mutation fails sanity checks. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class FailedSanityCheckException extends org.apache.hadoop.hbase.DoNotRetryIOException { private static final long serialVersionUID = 1788783640409186240L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java index b87e400..2291053 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.exceptions; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.DoNotRetryRegionException; @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.DoNotRetryRegionException; * Thrown when something is wrong in trying to merge two regions. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class MergeRegionException extends DoNotRetryRegionException { private static final long serialVersionUID = 4970899110066124122L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java index b31e055..63f8929 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/PreemptiveFastFailException.java @@ -23,7 +23,6 @@ import java.net.ConnectException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when the client believes that we are trying to communicate to has @@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * retries and fast fail the operation. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class PreemptiveFastFailException extends ConnectException { private static final long serialVersionUID = 7129103682617007177L; private long failureCount, timeOfFirstFailureMilliSec, timeOfLatestAttemptMilliSec; @@ -107,4 +105,4 @@ public class PreemptiveFastFailException extends ConnectException { public boolean isGuaranteedClientSideOnly() { return guaranteedClientSideOnly; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java index 06db472..78ea099 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionInRecoveryException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.exceptions; import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a read request issued against a region which is in recovering state. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionInRecoveryException extends NotServingRegionException { private static final long serialVersionUID = 327302071153799L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java index 0021f4a..c71bc6d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RequestTooBigException.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.exceptions; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when the size of the rpc request received by the server is too large. 
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * @since 1.3.0 */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RequestTooBigException extends DoNotRetryIOException { private static final long serialVersionUID = -1593339239809586516L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java index 7689eb1..0704189 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/ScannerResetException.java @@ -20,14 +20,12 @@ package org.apache.hadoop.hbase.exceptions; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when the server side has received an Exception, and asks the Client to reset the scanner * state by closing the current region scanner, and reopening from the start of last seen row. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ScannerResetException extends DoNotRetryIOException { private static final long serialVersionUID = -5649728171144849619L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java index 933e888..9fbc67d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/UnknownProtocolException.java @@ -20,14 +20,12 @@ package org.apache.hadoop.hbase.exceptions; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * An error requesting an RPC protocol that the server is not serving. */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class UnknownProtocolException extends org.apache.hadoop.hbase.DoNotRetryIOException { private Class protocol; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java index b59398b..87b622c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java @@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; @@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class BinaryComparator extends org.apache.hadoop.hbase.filter.ByteArrayComparable { /** * Constructor diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java index 01cb769..2c951f6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java @@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; @@ -37,7 +36,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * {@link BinaryComparator}. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class BinaryPrefixComparator extends ByteArrayComparable { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java index dac8864..96ef2e1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java @@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; @@ -34,12 +33,10 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * with the specified byte array. Then returns whether the result is non-zero. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class BitComparator extends ByteArrayComparable { /** Bit operators. */ @InterfaceAudience.Public - @InterfaceStability.Stable public enum BitwiseOp { /** and */ AND, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index 3ae20a1..dad4132 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * makes this filter unsuitable as a Scan filter. 
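ColumnCountGetFilter, described above, caps how many columns of a row come back and is meant for Get rather than Scan. A minimal usage sketch (the row key "row-1" and the limit of 10 are made up):

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnCountGetSketch {
  static Get firstTenColumns() {
    Get get = new Get(Bytes.toBytes("row-1"));
    // Stop returning columns for this row once 10 have been emitted.
    get.setFilter(new ColumnCountGetFilter(10));
    return get;
  }
}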
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class ColumnCountGetFilter extends FilterBase { private int limit = 0; private int count = 0; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index 7d4571e..696f868 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -25,7 +25,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; @@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; * for pagination. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ColumnPaginationFilter extends FilterBase { private int limit = 0; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index 7230d3a..b6e9607 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; @@ -42,7 +41,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ColumnPrefixFilter extends FilterBase { protected byte [] prefix = null; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index 99f9926..69b5088 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -50,7 +49,6 @@ import com.google.common.base.Preconditions; * or not. 
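A short ColumnRangeFilter sketch for the intra-row column range described above (the qualifiers "aaaa" and "bbbb" and the inclusiveness flags are arbitrary):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnRangeSketch {
  static Scan columnsInRange() {
    Scan scan = new Scan();
    // Return only columns whose qualifiers sort within ["aaaa", "bbbb"): min inclusive, max exclusive.
    scan.setFilter(new ColumnRangeFilter(Bytes.toBytes("aaaa"), true, Bytes.toBytes("bbbb"), false));
    return scan;
  }
}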
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class ColumnRangeFilter extends FilterBase { protected byte[] minColumn = null; protected boolean minColumnInclusive = true; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index bbc31ec..451d7ee 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -25,7 +25,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; @@ -49,12 +48,10 @@ import com.google.common.base.Preconditions; * Multiple filters can be combined using {@link FilterList}. */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class CompareFilter extends FilterBase { /** Comparison operators. */ @InterfaceAudience.Public - @InterfaceStability.Stable public enum CompareOp { /** less than */ LESS, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index d82eaec..c14314c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -28,7 +28,6 @@ import java.util.Set; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; * full rows for correct filtering */ @InterfaceAudience.Public -@InterfaceStability.Stable public class DependentColumnFilter extends CompareFilter { protected byte[] columnFamily; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index 8dfd2ca..a28855d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * directly rather than a filter. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class FamilyFilter extends CompareFilter { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java index 59aa855..de5d6c6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java @@ -24,7 +24,6 @@ import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; /** @@ -53,7 +52,6 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException; * @see FilterBase */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class Filter { protected transient boolean reversed; /** @@ -156,7 +154,6 @@ public abstract class Filter { * Return codes for filterValue(). */ @InterfaceAudience.Public - @InterfaceStability.Stable public enum ReturnCode { /** * Include the Cell diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index 04eba0c..d533026 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -52,11 +51,9 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * Defaults to {@link Operator#MUST_PASS_ALL}. */ @InterfaceAudience.Public -@InterfaceStability.Stable final public class FilterList extends FilterBase { /** set operator */ @InterfaceAudience.Public - @InterfaceStability.Stable public static enum Operator { /** !AND */ MUST_PASS_ALL, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index 14d23d4..8493610 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -23,7 +23,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * This filter can be used to more efficiently perform row count operations. 
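FirstKeyOnlyFilter is the usual building block for the cheap row counting mentioned above; a minimal sketch:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

public class RowCountScanSketch {
  static Scan rowCountingScan() {
    Scan scan = new Scan();
    // Only the first cell of each row is returned, which is enough to count rows
    // without shipping whole rows to the client.
    scan.setFilter(new FirstKeyOnlyFilter());
    return scan;
  }
}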
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class FirstKeyOnlyFilter extends FilterBase { private boolean foundKV = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index 4681fd3..ac5f125 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -24,7 +24,6 @@ import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; @@ -46,7 +45,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; * @deprecated Deprecated in 2.0. See HBASE-13347 */ @InterfaceAudience.Public -@InterfaceStability.Stable @Deprecated public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 65c2a61..895ffc8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -59,7 +58,6 @@ import com.google.common.annotations.VisibleForTesting; * mask is "????_99_????_01", where at ? can be any value. 
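A sketch of the fuzzy mask example from the javadoc above, expressed with the FuzzyRowFilter constructor (the userId_actionId_year_month key layout comes from that example; mask byte 0 means the byte must match, 1 means it may be anything):

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FuzzyRowFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;

public class FuzzyRowSketch {
  static Scan actionsInJanuary() {
    // Key layout userId_actionId_year_month: fix "_99_" and "_01", leave userId and year fuzzy.
    byte[] rowKey = Bytes.toBytes("????_99_????_01");
    byte[] fuzzyInfo = {1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0};
    List<Pair<byte[], byte[]>> fuzzyKeys = Arrays.asList(Pair.newPair(rowKey, fuzzyInfo));
    Scan scan = new Scan();
    scan.setFilter(new FuzzyRowFilter(fuzzyKeys));
    return scan;
  }
}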
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class FuzzyRowFilter extends FilterBase { private static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned(); private List> fuzzyKeysData; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 7aa807c..ed95a7d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -40,7 +39,6 @@ import com.google.common.base.Preconditions; * Use this filter to include the stop row, eg: [A,Z]. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class InclusiveStopFilter extends FilterBase { private byte [] stopRowKey; private boolean done = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java index 8eba03c..6410ab4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.filter; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Used to indicate a filter incompatibility */ @InterfaceAudience.Public -@InterfaceStability.Stable public class IncompatibleFilterException extends RuntimeException { private static final long serialVersionUID = 3236763276623198231L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java index 81aae0b..0406058 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.filter; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Used to indicate an invalid RowFilter. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class InvalidRowFilterException extends RuntimeException { private static final long serialVersionUID = 2667894046345657865L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index adbf304..b082941 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.Bytes; @@ -43,7 +42,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * the values. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class KeyOnlyFilter extends FilterBase { boolean lenAsVal; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java index 8bcc7b2..429b498 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java @@ -23,7 +23,6 @@ import java.nio.ByteBuffer; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; @@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.util.Bytes; * A long comparator which numerical compares against the specified byte array */ @InterfaceAudience.Public -@InterfaceStability.Stable public class LongComparator extends ByteArrayComparable { private long longValue; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index 77fbaf4..d398349 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -25,7 +25,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -48,7 +47,6 @@ import org.apache.hadoop.hbase.util.Bytes; * specified list and perform fast-forwarding during scan. Thus, the scan will be quite efficient. 
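MultiRowRangeFilter, described above, takes a list of row ranges; a rough sketch with two made-up disjoint ranges:

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRangeSketch {
  // throws is declared defensively; older versions of this constructor may throw IOException.
  static Scan twoDisjointRanges() throws IOException {
    Scan scan = new Scan();
    // Scan rows in ["a", "c") and ["m", "p"); rows outside the ranges are skipped by
    // fast-forwarding, as the javadoc above describes.
    scan.setFilter(new MultiRowRangeFilter(Arrays.asList(
        new RowRange(Bytes.toBytes("a"), true, Bytes.toBytes("c"), false),
        new RowRange(Bytes.toBytes("m"), true, Bytes.toBytes("p"), false))));
    return scan;
  }
}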
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MultiRowRangeFilter extends FilterBase { private List rangeList; @@ -413,7 +411,6 @@ public class MultiRowRangeFilter extends FilterBase { } @InterfaceAudience.Public - @InterfaceStability.Evolving public static class RowRange implements Comparable { private byte[] startRow; private boolean startRowInclusive = true; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index 12d9ac7..6bcb561 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -25,7 +25,6 @@ import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Bytes; * columns like 'and', 'anti' but not keys with columns like 'ball', 'act'. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class MultipleColumnPrefixFilter extends FilterBase { protected byte [] hint = null; protected TreeSet sortedPrefixes = createTreeSet(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java index 0d60e2e..a72afca 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java @@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.filter; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; @@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * byte array using {@link org.apache.hadoop.hbase.util.Bytes#compareTo(byte[], byte[])}. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class NullComparator extends ByteArrayComparable { public NullComparator() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index 2b91b7a..894e7b4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -23,7 +23,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * locally. 
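A minimal PageFilter sketch (the page size of 25 is arbitrary); as the javadoc above warns, the limit is enforced per region server, so the client still trims the final result:

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PageFilter;

public class PageSketch {
  static Scan firstPage() {
    Scan scan = new Scan();
    // Each region server stops after 25 filter-passed rows; the client may still receive
    // more than 25 rows in total and should trim locally.
    scan.setFilter(new PageFilter(25));
    return scan;
  }
}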
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class PageFilter extends FilterBase { private long pageSize = Long.MAX_VALUE; private int rowsAccepted = 0; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java index 3a20772..785f3f3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java @@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.filter; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * ParseConstants holds a bunch of constants related to parsing Filter Strings * Used by {@link ParseFilter} */ @InterfaceAudience.Public -@InterfaceStability.Stable public final class ParseConstants { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java index 0823785..21cdd9c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java @@ -33,7 +33,6 @@ import java.util.Stack; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.util.Bytes; @@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.util.Bytes; * Filter Language can be found at: https://issues.apache.org/jira/browse/HBASE-4176 */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ParseFilter { private static final Log LOG = LogFactory.getLog(ParseFilter.class); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index e3cefe5..33b3ead 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.ByteBufferCell; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hadoop.hbase.util.ByteBufferUtils; @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; * Pass results that have same row prefix. 
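A minimal PrefixFilter sketch (the prefix "user123|" is made up); the same filter can also be produced from the filter language handled by ParseFilter, e.g. parseFilterString("PrefixFilter('user123|')"):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class PrefixSketch {
  static Scan rowsWithPrefix() {
    Scan scan = new Scan();
    // Keep only rows whose key starts with "user123|"; once past the prefix the scan can stop early.
    scan.setFilter(new PrefixFilter(Bytes.toBytes("user123|")));
    return scan;
  }
}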
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class PrefixFilter extends FilterBase { protected byte [] prefix = null; protected boolean passedPrefix = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index cc240f8..72a50fb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -45,7 +44,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * directly rather than a filter. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class QualifierFilter extends CompareFilter { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index 177ed4d..48413ac 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -23,7 +23,6 @@ import java.util.Random; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RandomRowFilter extends FilterBase { protected static final Random random = new Random(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java index 3f05901..2f5a342 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java @@ -26,7 +26,6 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.Bytes; @@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * @see java.util.regex.Pattern */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RegexStringComparator extends ByteArrayComparable { private static final Log LOG = LogFactory.getLog(RegexStringComparator.class); @@ -80,7 +78,6 @@ public class RegexStringComparator extends ByteArrayComparable { /** Engine implementation type (default=JAVA) */ @InterfaceAudience.Public - 
@InterfaceStability.Stable public enum EngineType { JAVA, JONI diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 4f91f8b..3f6136f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * and stop rows directly rather than a filter. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RowFilter extends CompareFilter { private boolean filterOutRow = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index 192fd97..6b155b0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -27,7 +27,6 @@ import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * needed as input (besides for the filtering itself). */ @InterfaceAudience.Public -@InterfaceStability.Stable public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index 3bb80cb..0dbc0bb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException; @@ -68,7 +67,6 @@ import com.google.common.base.Preconditions; * To filter based on the value of all scanned columns, use {@link ValueFilter}. 
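SingleColumnValueFilter, described in the javadoc above, keys an entire row's fate on one column. A short sketch with made-up family "info", qualifier "status" and value "active":

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class SingleColumnValueSketch {
  static Scan rowsWhereStatusIsActive() {
    SingleColumnValueFilter filter = new SingleColumnValueFilter(
        Bytes.toBytes("info"), Bytes.toBytes("status"), CompareOp.EQUAL, Bytes.toBytes("active"));
    // Without this, rows that lack the info:status column are emitted as well.
    filter.setFilterIfMissing(true);
    Scan scan = new Scan();
    scan.setFilter(filter);
    return scan;
  }
}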
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class SingleColumnValueFilter extends FilterBase { protected byte [] columnFamily; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index 5461011..1cdf206 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -51,7 +50,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE *

*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class SkipFilter extends FilterBase { private boolean filterRow = false; private Filter filter; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java index d30d057..157d97c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.filter; import java.util.Locale; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; import org.apache.hadoop.hbase.util.Bytes; @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * */ @InterfaceAudience.Public -@InterfaceStability.Stable public class SubstringComparator extends ByteArrayComparable { private String substr; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index 8c58f91..b1409e3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -25,7 +25,6 @@ import java.util.TreeSet; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -42,7 +41,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * or {@link org.apache.hadoop.hbase.client.Scan#setTimeStamp(long)}. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TimestampsFilter extends FilterBase { private final boolean canHint; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index c0dacaf..4edb57b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * use {@link SingleColumnValueFilter}. 
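ValueFilter, described above, tests every cell value; combined with SkipFilter (also updated above) the check is promoted to whole-row granularity. A sketch using a made-up substring "error":

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;

public class ValueFilterSketch {
  static Scan cellsContainingError() {
    Scan scan = new Scan();
    // Keep only cells whose value contains "error" (SubstringComparator matches case-insensitively).
    scan.setFilter(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("error")));
    return scan;
  }

  static Scan dropRowsContainingError() {
    Scan scan = new Scan();
    // SkipFilter inverts the granularity: if any cell in a row fails the NOT_EQUAL check
    // (i.e. the cell's value contains "error"), the entire row is skipped.
    scan.setFilter(new SkipFilter(new ValueFilter(CompareOp.NOT_EQUAL, new SubstringComparator("error"))));
    return scan;
  }
}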
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class ValueFilter extends CompareFilter { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index 8738962..6de3676 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferE * returns true. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class WhileMatchFilter extends FilterBase { private boolean filterAllRemaining = false; private Filter filter; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java index 7eb96d5..010dbb9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BadAuthException.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class BadAuthException extends FatalConnectionException { public BadAuthException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java index a6777c0..d7f8c1e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallCancelledException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Client side call cancelled. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class CallCancelledException extends HBaseIOException { private static final long serialVersionUID = 309775809470318208L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java index db8c34a..9a67aeb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallTimeoutException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Client-side call timeout */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class CallTimeoutException extends HBaseIOException { public CallTimeoutException(final String msg) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java index 6fd038f..c6dec2d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.ipc; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception indicating that the remote host making this IPC lost its @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * but is only used for logging on the server side, etc. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class CallerDisconnectedException extends IOException { private static final long serialVersionUID = 1L; public CallerDisconnectedException(String msg) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java index ffd27b3..d93d9f6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellScannerButNoCodecException.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if a cellscanner but no codec to encode it with. 
*/ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class CellScannerButNoCodecException extends HBaseIOException { -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java index 7ed1f7d..a9c10ce 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.ipc; import com.google.protobuf.BlockingRpcChannel; import com.google.protobuf.RpcChannel; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Base interface which provides clients with an RPC connection to @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * {@link org.apache.hadoop.hbase.client.Table#coprocessorService(byte[])}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface CoprocessorRpcChannel extends RpcChannel, BlockingRpcChannel {} // This Interface is part of our public, client-facing API!!! -// This belongs in client package but it is exposed in our public API so we cannot relocate. \ No newline at end of file +// This belongs in client package but it is exposed in our public API so we cannot relocate. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java index 12f6451..e50a82e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FailedServerException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Indicates that we're trying to connect to a already known as dead server. We will want to @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class FailedServerException extends HBaseIOException { public FailedServerException(String s) { super(s); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java index 721148b..ac9fa97 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FallbackDisallowedException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Indicate that the rpc server tells client to fallback to simple auth but client is disabled to do * so. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class FallbackDisallowedException extends HBaseIOException { private static final long serialVersionUID = -6942845066279358253L; @@ -35,4 +33,4 @@ public class FallbackDisallowedException extends HBaseIOException { super("Server asks us to fall back to SIMPLE auth, " + "but this client is configured to only allow secure connections."); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java index 86d3b89..74bd42f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/FatalConnectionException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when server finds fatal issue w/ connection setup: e.g. bad rpc version @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class FatalConnectionException extends DoNotRetryIOException { public FatalConnectionException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java index a8af69c..fe039d3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.java @@ -28,7 +28,6 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Pair; /** @@ -39,7 +38,6 @@ import org.apache.hadoop.hbase.util.Pair; * the whole process. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class NettyRpcClientConfigHelper { public static final String EVENT_LOOP_CONFIG = "hbase.rpc.client.event-loop.config"; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java index 0e50943..eff5b7f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RemoteWithExtrasException.java @@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.util.DynamicClassLoader; import org.apache.hadoop.ipc.RemoteException; @@ -37,7 +36,6 @@ import org.apache.hadoop.ipc.RemoteException; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving @edu.umd.cs.findbugs.annotations.SuppressWarnings( value = "DP_CREATE_CLASSLOADER_INSIDE_DO_PRIVILEGED", justification = "None. Address sometime.") public class RemoteWithExtrasException extends RemoteException { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java index 6d0b9de..15db1c0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java @@ -22,11 +22,9 @@ package org.apache.hadoop.hbase.ipc; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class ServerNotRunningYetException extends IOException { public ServerNotRunningYetException(String s) { super(s); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java index 0dd8e64..e2ed361 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ServerTooBusyException.java @@ -22,16 +22,14 @@ import java.net.InetSocketAddress; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Throw this in RPC call if there are too many pending requests for one region server */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class ServerTooBusyException extends DoNotRetryIOException { public ServerTooBusyException(InetSocketAddress address, long count) { super("Busy Server! 
" + count + " concurrent RPCs against " + address); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java index a224a12..63ce25b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/StoppedRpcClientException.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class StoppedRpcClientException extends HBaseIOException { public StoppedRpcClientException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java index 3208876..19914e1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCellCodecException.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class UnsupportedCellCodecException extends FatalConnectionException { public UnsupportedCellCodecException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java index 7ca7dd5..271bd2b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCompressionCodecException.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class UnsupportedCompressionCodecException extends FatalConnectionException { public UnsupportedCompressionCodecException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java index 12e4a7a..5f2fa07 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/UnsupportedCryptoException.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class UnsupportedCryptoException extends FatalConnectionException { public UnsupportedCryptoException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java index 73bd10d..d63c867 100644 --- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/WrongVersionException.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hbase.ipc; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class WrongVersionException extends FatalConnectionException { public WrongVersionException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java index e0386b5..0ab75da 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaExceededException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.quotas; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Generic quota exceeded exception */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class QuotaExceededException extends DoNotRetryIOException { public QuotaExceededException(String msg) { super(msg); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java index 309dd9c..b8a99a6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaFilter.java @@ -21,14 +21,12 @@ import java.util.HashSet; import java.util.Set; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Strings; /** * Filter to use to filter the QuotaRetriever results. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class QuotaFilter { private Set types = new HashSet<>(); private boolean hasFilters = false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java index fecd2d1..cba6a24 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java @@ -29,7 +29,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Result; @@ -43,7 +42,6 @@ import org.apache.hadoop.util.StringUtils; * Scanner to iterate over the quota settings. 
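A minimal usage sketch for the quota classes in this hunk, including the settings scanner described just above (illustrative only, not part of the patch; the user name "bob", the limit, and a reachable cluster with default client configuration are assumptions):

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.quotas.QuotaRetriever;
import org.apache.hadoop.hbase.quotas.QuotaSettings;
import org.apache.hadoop.hbase.quotas.QuotaSettingsFactory;
import org.apache.hadoop.hbase.quotas.ThrottleType;

public class QuotaUsageSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      // Throttle the (hypothetical) user "bob" to 100 requests per second.
      admin.setQuota(QuotaSettingsFactory.throttleUser("bob", ThrottleType.REQUEST_NUMBER,
          100, TimeUnit.SECONDS));
    }
    // QuotaRetriever is Closeable and Iterable, so it fits try-with-resources.
    try (QuotaRetriever retriever = QuotaRetriever.open(conf)) {
      for (QuotaSettings settings : retriever) {
        System.out.println(settings);
      }
    }
  }
}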
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class QuotaRetriever implements Closeable, Iterable { private static final Log LOG = LogFactory.getLog(QuotaRetriever.class); @@ -182,4 +180,4 @@ public class QuotaRetriever implements Closeable, Iterable { scanner.init(conf, scan); return scanner; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java index 2e215b6..4a7d241 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaScope.java @@ -18,14 +18,12 @@ package org.apache.hadoop.hbase.quotas; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Describe the Scope of the quota rules. * The quota can be enforced at the cluster level or at machine level. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum QuotaScope { /** * The specified throttling rules will be applied at the cluster level. diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java index ac6a396..193d165 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettings.java @@ -21,12 +21,10 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest; @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class QuotaSettings { private final String userName; private final String namespace; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java index 1a8b934..3622a32 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaSettingsFactory.java @@ -23,14 +23,12 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas; @InterfaceAudience.Public -@InterfaceStability.Evolving public class QuotaSettingsFactory { static class QuotaGlobalsSettingsBypass extends QuotaSettings { private final boolean bypassGlobals; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java index 40a8b66..1ec649f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaType.java @@ -18,13 +18,11 @@ 
package org.apache.hadoop.hbase.quotas; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Describe the Quota Type. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum QuotaType { THROTTLE, GLOBAL_BYPASS, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java index 9b456c2..724c880 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottleType.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.quotas; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Describe the Throttle Type. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum ThrottleType { /** Throttling based on the number of requests per time-unit */ REQUEST_NUMBER, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java index 293e9c6..ec665ae 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/ThrottlingException.java @@ -22,7 +22,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Describe the throttling result. @@ -32,12 +31,10 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * as result of this exception. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ThrottlingException extends QuotaExceededException { private static final long serialVersionUID = 1406576492085155743L; @InterfaceAudience.Public - @InterfaceStability.Evolving public enum Type { NumRequestsExceeded, RequestSizeExceeded, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java index 50b8b15..073233b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/BloomType.java @@ -20,10 +20,8 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public enum BloomType { /** * Bloomfilters disabled diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java index d1fdae3..e4de9c7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Reports a problem with a lease */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class LeaseException extends DoNotRetryIOException { private static final long serialVersionUID = 8179703995292418650L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java index d3b1ec1..e2b7bba 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if request for nonexistent column family. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class NoSuchColumnFamilyException extends DoNotRetryIOException { private static final long serialVersionUID = -6569952730832331274L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java index ddc2270..54d973b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAbortedException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.regionserver; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown by the region server when it is aborting. 
*/ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionServerAbortedException extends RegionServerStoppedException { public RegionServerAbortedException(String s) { super(s); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java index a4a9720..e8651db 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java @@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown if the region server log directory exists (which indicates another * region server is running at the same address) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionServerRunningException extends IOException { private static final long serialVersionUID = 1L << 31 - 1L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java index 95f697e..99af432 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown by the region server when it is in shutting down state. 
@@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class RegionServerStoppedException extends IOException { public RegionServerStoppedException(String s) { super(s); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java index c2460d4..eb69e33 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java @@ -21,13 +21,11 @@ package org.apache.hadoop.hbase.regionserver; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a request contains a key which is not part of this region */ @InterfaceAudience.Public -@InterfaceStability.Stable public class WrongRegionException extends IOException { private static final long serialVersionUID = 993179627856392526L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java index cc42819..c614a57 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java @@ -21,14 +21,12 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when we fail close of the write-ahead-log file. * Package private. Only used inside this package. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class FailedLogCloseException extends IOException { private static final long serialVersionUID = 1759152841462990925L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java index 4c6ef45..ff79716 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedSyncBeforeLogCloseException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.regionserver.wal; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when we fail close of the write-ahead-log file. * Package private. Only used inside this package. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class FailedSyncBeforeLogCloseException extends FailedLogCloseException { private static final long serialVersionUID = 1759152841462990925L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java index 937e943..66781f1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationException.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.replication; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.HBaseException; /** @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.exceptions.HBaseException; * store, loss of connection to a peer cluster or errors during deserialization of replication data. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ReplicationException extends HBaseException { private static final long serialVersionUID = -8885598603988198062L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java index f7cc2dd..badec0e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerConfig.java @@ -27,14 +27,12 @@ import java.util.TreeMap; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** * A configuration for the replication peer cluster. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ReplicationPeerConfig { private String clusterKey; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java index 577d13a..95c84c2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerDescription.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.replication; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * The POJO equivalent of ReplicationProtos.ReplicationPeerDescription */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ReplicationPeerDescription { private final String id; @@ -56,4 +54,4 @@ public class ReplicationPeerDescription { builder.append(", config : " + config); return builder.toString(); } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java index 07b871d..cf57517 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.security; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception thrown by access-related methods. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class AccessDeniedException extends DoNotRetryIOException { private static final long serialVersionUID = 1913879564363001780L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java index 1c4a868..5c89c3f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Table; @@ -43,7 +42,6 @@ import org.apache.hadoop.hbase.util.Bytes; * Utility client for doing access control admin operations. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class AccessControlClient { public static final TableName ACL_TABLE_NAME = TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "acl"); @@ -295,4 +293,4 @@ public class AccessControlClient { } return permList; } -} \ No newline at end of file +} diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java index f5d16d4..52d10c4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlConstants.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.security.access; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public interface AccessControlConstants { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java index 7bf5304..b25783d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java @@ -27,7 +27,6 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.VersionedWritable; @@ -40,12 +39,10 @@ import com.google.common.collect.Maps; * @see TablePermission */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class Permission extends VersionedWritable { protected static final byte VERSION = 0; @InterfaceAudience.Public - @InterfaceStability.Evolving public enum Action { READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A'); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java index 5fdeee9..63a4d09 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/Authorizations.java @@ -22,14 +22,12 @@ import java.util.Collections; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * This class contains visibility labels associated with a Scan/Get deciding which all labeled data * current scan/get can access. 
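Authorizations (and CellVisibility, a few hunks below) are the client-facing pieces of the visibility-label feature. A minimal sketch of typical use (illustrative only, not part of the patch; the table "demo", family "f", and the labels are placeholders, and the labels are assumed to already exist on the cluster):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.CellVisibility;
import org.apache.hadoop.hbase.util.Bytes;

public class VisibilitySketch {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection();
        Table table = conn.getTable(TableName.valueOf("demo"))) {
      Put put = new Put(Bytes.toBytes("row1"));
      put.addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("value"));
      // This cell is visible only to reads authorized with "secret" and not "public".
      put.setCellVisibility(new CellVisibility("secret & !public"));
      table.put(put);

      Get get = new Get(Bytes.toBytes("row1"));
      // The labels this read presents; they bound what the scan/get can access.
      get.setAuthorizations(new Authorizations("secret"));
      Result result = table.get(get);
      System.out.println(result);
    }
  }
}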
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class Authorizations { private List labels; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java index 765559f..8cd1ae7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/CellVisibility.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.security.visibility; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.util.Bytes; * operators AND(&), OR(|) and NOT(!) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class CellVisibility { private String expression; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java index d11c167..8d20de8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/InvalidLabelException.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.security.visibility; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class InvalidLabelException extends DoNotRetryIOException { private static final long serialVersionUID = 1L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java index 3fbf937..3fb039a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/LabelAlreadyExistsException.java @@ -19,10 +19,8 @@ package org.apache.hadoop.hbase.security.visibility; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public -@InterfaceStability.Evolving public class LabelAlreadyExistsException extends DoNotRetryIOException { private static final long serialVersionUID = 1L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java index d87bf14..d99f454 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -26,7 +26,6 @@ import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import 
org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Table; @@ -53,7 +52,6 @@ import com.google.protobuf.ServiceException; * Utility client for doing visibility labels admin operations. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class VisibilityClient { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java index 90dd0a7..4d87bdf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityControllerNotReadyException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase.security.visibility; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /* * This exception indicates that VisibilityController hasn't finished initialization. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class VisibilityControllerNotReadyException extends IOException { private static final long serialVersionUID = 1725986525207989173L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java index d0ef28d..7a1761c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class CorruptedSnapshotException extends HBaseSnapshotException { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java index 05f3556..f6817e7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when a snapshot could not be exported due to an error during the operation. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable @SuppressWarnings("serial") public class ExportSnapshotException extends HBaseSnapshotException { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java index 2fe58ed..bd185a1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; /** @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class HBaseSnapshotException extends DoNotRetryIOException { private SnapshotDescription description; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java index 70e8d3b..de58077 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; /** @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class RestoreSnapshotException extends HBaseSnapshotException { public RestoreSnapshotException(String msg, SnapshotDescription desc) { super(msg, desc); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java index 2738b3d..9cfe83a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; /** @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class SnapshotCreationException extends HBaseSnapshotException { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java index e088408..ae574b4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java @@ -18,7 +18,6 @@ package 
org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class SnapshotDoesNotExistException extends HBaseSnapshotException { /** * @param msg full description of the failure diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java index 425f93a..9b31625 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; /** @@ -26,7 +25,6 @@ import org.apache.hadoop.hbase.client.SnapshotDescription; */ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class SnapshotExistsException extends HBaseSnapshotException { public SnapshotExistsException(String msg) { super(msg); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java index b27ff65..343d702 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java @@ -21,14 +21,12 @@ import java.io.IOException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** * Thrown if a table should be online/offline but is partially open */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TablePartiallyOpenException extends IOException { private static final long serialVersionUID = 3571982660065058361L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java index e4242f5..dc6dd56 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java @@ -18,14 +18,12 @@ package org.apache.hadoop.hbase.snapshot; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception thrown when we get a request for a snapshot we don't recognize. 
*/ @SuppressWarnings("serial") @InterfaceAudience.Public -@InterfaceStability.Evolving public class UnknownSnapshotException extends HBaseSnapshotException { /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java index 2cd1d00..1613582 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java @@ -22,11 +22,9 @@ package org.apache.hadoop.hbase.util; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** Thrown when the file system needs to be upgraded */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class FileSystemVersionException extends IOException { private static final long serialVersionUID = 1004053363L; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java index 2a9987c..dddd052 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/JsonMapper.java @@ -22,14 +22,12 @@ import java.io.IOException; import java.util.Map; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.codehaus.jackson.map.ObjectMapper; /** * Utility class for converting objects to JSON */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class JsonMapper { private JsonMapper() { } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java index 9acbb43..c5af6ff 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java @@ -40,26 +40,32 @@ import org.apache.hadoop.hbase.ClassFinder.Not; import org.apache.hadoop.hbase.ClassTestFinder.TestClassFilter; import org.apache.hadoop.hbase.ClassTestFinder.TestFileNameFilter; import org.junit.Assert; +import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; /** - * Test cases for ensuring our client visible classes have annotations - * for {@link InterfaceAudience}. - * - * All classes in hbase-client and hbase-common module MUST have InterfaceAudience - * annotations. All InterfaceAudience.Public annotated classes MUST also have InterfaceStability - * annotations. Think twice about marking an interface InterfaceAudience.Public. Make sure that - * it is an interface, not a class (for most cases), and clients will actually depend on it. Once - * something is marked with Public, we cannot change the signatures within the major release. NOT - * everything in the hbase-client module or every java public class has to be marked with + * Test cases for ensuring our client visible classes have annotations for + * {@link InterfaceAudience}. + *
<p>
+ * All classes in hbase-client and hbase-common module MUST have InterfaceAudience annotations. + * Think twice about marking an interface InterfaceAudience.Public. Make sure that it is an + * interface, not a class (for most cases), and clients will actually depend on it. Once something + * is marked with Public, we cannot change the signatures within the major release. NOT everything + * in the hbase-client module or every java public class has to be marked with * InterfaceAudience.Public. ONLY the ones that an hbase application will directly use (Table, Get, - * etc, versus ProtobufUtil). - * - * Also note that HBase has it's own annotations in hbase-annotations module with the same names - * as in Hadoop. You should use the HBase's classes. - * - * See https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html + * etc, versus ProtobufUtil). And also, InterfaceAudience.Public annotated classes MUST NOT have + * InterfaceStability annotations. The stability of these classes only depends on versioning. + *
<p>
+ * All classes which are marked as InterfaceAudience.LimitedPrivate MUST also have + * InterfaceStability annotations. The only exception is HBaseInterfaceAudience.CONFIG. It is used + * to indicate that the class name will be exposed in user facing configuration files. + *
<p>
+ * Also note that HBase has its own annotations in hbase-annotations module with the same names as + * in Hadoop. You should use HBase's classes. + *
<p>
+ * See + * https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html * and https://issues.apache.org/jira/browse/HBASE-10462. */ @Category(SmallTests.class) @@ -105,16 +111,15 @@ public class TestInterfaceAudienceAnnotations { return false; } - Class ann = getAnnotation(c); - if (ann != null && - !InterfaceAudience.Public.class.equals(ann)) { + Annotation ann = getAnnotation(c); + if (ann != null && !InterfaceAudience.Public.class.equals(ann.annotationType())) { return true; } return isAnnotatedPrivate(c.getEnclosingClass()); } - protected Class getAnnotation(Class c) { + protected Annotation getAnnotation(Class c) { // we should get only declared annotations, not inherited ones Annotation[] anns = c.getDeclaredAnnotations(); @@ -123,7 +128,7 @@ public class TestInterfaceAudienceAnnotations { // an enum instead we have three independent annotations! Class type = ann.annotationType(); if (isInterfaceAudienceClass(type)) { - return type; + return ann; } } return null; @@ -159,13 +164,32 @@ public class TestInterfaceAudienceAnnotations { } } - /** Selects classes with one of the {@link InterfaceAudience.Public} annotation in their - * class declaration. + /** + * Selects classes with one of the {@link InterfaceAudience.Public} annotation in their class + * declaration. */ class InterfaceAudiencePublicAnnotatedClassFilter extends InterfaceAudienceAnnotatedClassFilter { @Override public boolean isCandidateClass(Class c) { - return (InterfaceAudience.Public.class.equals(getAnnotation(c))); + Annotation ann = getAnnotation(c); + return ann != null && InterfaceAudience.Public.class.equals(ann.annotationType()); + } + } + + /** + * Selects classes with one of the {@link InterfaceAudience.LimitedPrivate} annotation in their + * class declaration. + */ + class InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter + extends InterfaceAudienceAnnotatedClassFilter { + @Override + public boolean isCandidateClass(Class c) { + Annotation ann = getAnnotation(c); + if (ann == null || !InterfaceAudience.LimitedPrivate.class.equals(ann.annotationType())) { + return false; + } + InterfaceAudience.LimitedPrivate iaAnn = (InterfaceAudience.LimitedPrivate) ann; + return iaAnn.value().length == 0 || !HBaseInterfaceAudience.CONFIG.equals(iaAnn.value()[0]); } } @@ -288,10 +312,11 @@ public class TestInterfaceAudienceAnnotations { ); Set> classes = classFinder.findClasses(false); - - LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:"); - for (Class clazz : classes) { - LOG.info(clazz); + if (!classes.isEmpty()) { + LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:"); + for (Class clazz : classes) { + LOG.info(clazz); + } } Assert.assertEquals("All classes should have @InterfaceAudience annotation", @@ -300,10 +325,10 @@ public class TestInterfaceAudienceAnnotations { /** * Checks whether all the classes in client and common modules that are marked - * InterfaceAudience.Public also have {@link InterfaceStability} annotations. + * InterfaceAudience.Public do not have {@link InterfaceStability} annotations. 
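Restated as code, the convention the updated javadoc spells out looks like this (hypothetical classes for illustration only, not part of the patch):

import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;

// User-facing API: audience annotation only; stability now follows from versioning.
@InterfaceAudience.Public
class ExamplePublicApi {
}

// Limited-private API (other than CONFIG): audience plus an explicit stability level.
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
class ExampleLimitedPrivateApi {
}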
*/ @Test - public void testInterfaceStabilityAnnotation() + public void testNoInterfaceStabilityAnnotationForPublicAPI() throws ClassNotFoundException, IOException, LinkageError { // find classes that are: @@ -313,7 +338,7 @@ public class TestInterfaceAudienceAnnotations { // NOT test classes // AND NOT generated classes // AND are annotated with InterfaceAudience.Public - // AND NOT annotated with InterfaceStability + // AND annotated with InterfaceStability ClassFinder classFinder = new ClassFinder( new And(new MainCodeResourcePathFilter(), new TestFileNameFilter()), @@ -324,18 +349,65 @@ public class TestInterfaceAudienceAnnotations { new Not(new ShadedProtobufClassFilter()), new InterfaceAudiencePublicAnnotatedClassFilter(), new Not(new IsInterfaceStabilityClassFilter()), - new Not(new InterfaceStabilityAnnotatedClassFilter())) + new InterfaceStabilityAnnotatedClassFilter()) ); Set> classes = classFinder.findClasses(false); - LOG.info("These are the classes that DO NOT have @InterfaceStability annotation:"); - for (Class clazz : classes) { - LOG.info(clazz); + if (!classes.isEmpty()) { + LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " + + "annotation:"); + for (Class clazz : classes) { + LOG.info(clazz); + } } - Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should " - + "have @InterfaceStability annotation as well", + Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should not " + + "have @InterfaceStability annotation", + 0, classes.size()); + } + + /** + * Checks whether all the classes in client and common modules that are marked + * InterfaceAudience.Public do not have {@link InterfaceStability} annotations. + */ + @Ignore + @Test + public void testInterfaceStabilityAnnotationForLimitedAPI() + throws ClassNotFoundException, IOException, LinkageError { + + // find classes that are: + // In the main jar + // AND are not in a hadoop-compat module + // AND are public + // NOT test classes + // AND NOT generated classes + // AND are annotated with InterfaceAudience.LimitedPrivate + // AND NOT annotated with InterfaceStability + ClassFinder classFinder = new ClassFinder( + new And(new MainCodeResourcePathFilter(), + new TestFileNameFilter()), + new Not((FileNameFilter)new TestFileNameFilter()), + new And(new PublicClassFilter(), + new Not(new TestClassFilter()), + new Not(new GeneratedClassFilter()), + new Not(new ShadedProtobufClassFilter()), + new InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter(), + new Not(new IsInterfaceStabilityClassFilter()), + new Not(new InterfaceStabilityAnnotatedClassFilter())) + ); + + Set> classes = classFinder.findClasses(false); + + if (!classes.isEmpty()) { + LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " + + "have @InterfaceStability annotation:"); + for (Class clazz : classes) { + LOG.info(clazz); + } + } + Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " + + "should have @InterfaceStability annotation", 0, classes.size()); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java index bbed218..d715d01 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AuthUtil.java @@ -25,7 +25,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.util.DNS; import org.apache.hadoop.hbase.util.Strings; @@ -68,7 +67,6 @@ import org.apache.hadoop.security.UserGroupInformation; * an example of configuring a user of this Auth Chore to run on a secure cluster. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class AuthUtil { private static final Log LOG = LogFactory.getLog(AuthUtil.class); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java index 53b319b..8a701f2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Cell.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** @@ -60,7 +59,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; *

*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface Cell { //1) Row diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index bb5197f..6585173 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -36,7 +36,6 @@ import java.util.NavigableMap; import org.apache.hadoop.hbase.KeyValue.Type; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience.Private; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.TagCompressionContext; import org.apache.hadoop.hbase.io.util.Dictionary; @@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.util.ClassSize; * method level. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class CellUtil { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java index 19363d0..70858f4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ChoreService.java @@ -32,7 +32,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.ScheduledChore.ChoreServicer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * ChoreService is a service that can be used to schedule instances of {@link ScheduledChore} to run @@ -54,7 +53,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * Calling this method ensures that all scheduled chores are cancelled and cleaned up properly. 
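Since the ChoreService javadoc above describes the schedule-then-shutdown lifecycle, here is a minimal, hypothetical usage sketch (not part of this patch); it assumes the default millisecond period unit and a throwaway Stoppable.

import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;

public class ChoreServiceSketch {
  public static void main(String[] args) throws InterruptedException {
    // A trivial Stoppable so the chore has something to report stop requests to.
    Stoppable stopper = new Stoppable() {
      private volatile boolean stopped = false;

      @Override
      public void stop(String why) {
        stopped = true;
      }

      @Override
      public boolean isStopped() {
        return stopped;
      }
    };

    ChoreService service = new ChoreService("example");
    // Runs roughly once a second until the service is shut down.
    service.scheduleChore(new ScheduledChore("heartbeat-chore", stopper, 1000) {
      @Override
      protected void chore() {
        System.out.println("chore tick");
      }
    });

    Thread.sleep(3000);
    // Cancels all scheduled chores and cleans up their resources.
    service.shutdown();
  }
}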
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class ChoreService implements ChoreServicer { private static final Log LOG = LogFactory.getLog(ChoreService.class); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java index 4baaabe..885219a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java @@ -26,7 +26,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.hbase.zookeeper.ZKConfig; @@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.zookeeper.ZKConfig; * Adds HBase configuration files to a Configuration */ @InterfaceAudience.Public -@InterfaceStability.Stable public class HBaseConfiguration extends Configuration { private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java index edcbdc5..85e8725 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseIOException.java @@ -20,13 +20,11 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * All hbase specific IOExceptions should be subclasses of HBaseIOException */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HBaseIOException extends IOException { private static final long serialVersionUID = 1L; @@ -46,4 +44,4 @@ public class HBaseIOException extends IOException { public HBaseIOException(Throwable cause) { super(cause); } -} \ No newline at end of file +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java index cb42e48..ae1db7c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HBaseInterfaceAudience.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * This class defines constants for different classes of hbase limited private apis */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class HBaseInterfaceAudience { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java index 3789f71..eff5690 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java @@ -29,14 +29,12 @@ import java.util.regex.Pattern; import org.apache.commons.lang.ArrayUtils; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** * HConstants 
holds a bunch of HBase-related constants */ @InterfaceAudience.Public -@InterfaceStability.Stable public final class HConstants { // NOTICE!!!! Please do not add a constants here, unless they are referenced by a lot of classes. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java index 23876ab..15f71a9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/NamespaceDescriptor.java @@ -27,7 +27,6 @@ import java.util.TreeMap; import java.util.TreeSet; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -38,7 +37,6 @@ import org.apache.hadoop.hbase.util.Bytes; * as opposed to a more tangible container. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class NamespaceDescriptor { /** System namespace name. */ @@ -162,7 +160,6 @@ public class NamespaceDescriptor { } @InterfaceAudience.Public - @InterfaceStability.Evolving public static class Builder { private String bName; private Map bConfiguration = new TreeMap<>(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java index f35f27b..bb8bb08 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.NonceKey; @@ -31,7 +30,6 @@ import org.apache.hadoop.util.StringUtils; * Procedure information */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ProcedureInfo implements Cloneable { private final long procId; private final String procName; @@ -216,4 +214,4 @@ public class ProcedureInfo implements Cloneable { return procOwner.equals(user.getShortName()); } -} \ No newline at end of file +} diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java index 306d285..5d95add 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureState.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * POJO representing Procedure State */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum ProcedureState { INITIALIZING, RUNNABLE, WAITING, WAITING_TIMEOUT, ROLLEDBACK, FINISHED; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java index 422ca1a..2d1eec5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java @@ -24,7 +24,6 @@ import java.util.concurrent.TimeUnit; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import com.google.common.annotations.VisibleForTesting; @@ -42,7 +41,6 @@ import com.google.common.annotations.VisibleForTesting; * an entry being added to a queue, etc. */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class ScheduledChore implements Runnable { private static final Log LOG = LogFactory.getLog(ScheduledChore.class); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java index 0c0a7ff..fabf0c0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -25,7 +25,6 @@ import java.util.Locale; import java.util.regex.Pattern; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.util.Addressing; import org.apache.hadoop.hbase.util.Bytes; @@ -56,7 +55,6 @@ import com.google.common.net.InetAddresses; *

Immutable. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ServerName implements Comparable, Serializable { private static final long serialVersionUID = 1367463982557264981L; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java index 9adaa1a..cdb802c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/Stoppable.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Implementers are Stoppable. */ @InterfaceAudience.Public -@InterfaceStability.Stable public interface Stoppable { /** * Stop this service. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java index cba03c0..c4c15d0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java @@ -24,7 +24,6 @@ import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.KeyValue.KVComparator; @@ -53,7 +52,6 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator; *

*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class TableName implements Comparable { /** See {@link #createTableNameIfNecessary(ByteBuffer, ByteBuffer)} */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java index 2133750..8af562e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.filter; import java.nio.ByteBuffer; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.Bytes; /** Base class for byte array comparators */ @InterfaceAudience.Public -@InterfaceStability.Stable // TODO Now we are deviating a lot from the actual Comparable what this implements, by // adding special compareTo methods. We have to clean it. Deprecate this class and replace it // with a more generic one which says it compares bytes (not necessary a byte array only) diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index f6f7def..9aaa431 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -28,7 +28,6 @@ import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.ByteBufferUtils; import org.apache.hadoop.hbase.util.Bytes; @@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.util.Bytes; * Not thread safe! */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream implements ByteBufferWriter { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java index f658210..5c7c292 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java @@ -25,7 +25,6 @@ import java.util.Arrays; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparator; @@ -40,7 +39,6 @@ import org.apache.hadoop.io.WritableComparator; * buffer is accessed when we go to serialize. */ @InterfaceAudience.Public -@InterfaceStability.Stable @edu.umd.cs.findbugs.annotations.SuppressWarnings( value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", justification="It has been like this forever") @@ -229,7 +227,6 @@ implements WritableComparable { /** A Comparator optimized for ImmutableBytesWritable. 
*/ @InterfaceAudience.Public - @InterfaceStability.Stable public static class Comparator extends WritableComparator { private BytesWritable.Comparator comparator = new BytesWritable.Comparator(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java index 77b9495..764b2a0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/TimeRange.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.io; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.util.Bytes; *

Immutable. Thread-safe. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TimeRange { public static final long INITIAL_MIN_TIMESTAMP = 0L; public static final long INITIAL_MAX_TIMESTAMP = Long.MAX_VALUE; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java index 8dfab44..6f63f80 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java @@ -28,7 +28,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; @@ -101,7 +100,6 @@ public final class Compression { value="SE_TRANSIENT_FIELD_NOT_RESTORED", justification="We are not serializing so doesn't apply (not sure why transient though)") @InterfaceAudience.Public - @InterfaceStability.Evolving public static enum Algorithm { LZO("lzo") { // Use base type to avoid compile-time dependencies. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java index e19a13d..3f4bf2b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Cipher.java @@ -22,13 +22,11 @@ import java.io.OutputStream; import java.security.Key; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A common interface for a cryptographic algorithm. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Cipher { public static final int KEY_LENGTH = 16; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java index 5a475cc..e457c13 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CipherProvider.java @@ -18,14 +18,12 @@ package org.apache.hadoop.hbase.io.crypto; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * An CipherProvider contributes support for various cryptographic * Ciphers. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface CipherProvider extends Configurable { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java index 1e2881e..a8dc396 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Context.java @@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.MD5Hash; import com.google.common.base.Preconditions; @@ -31,7 +30,6 @@ import com.google.common.base.Preconditions; * Crypto context. Encapsulates an encryption algorithm and its key material. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class Context implements Configurable { private Configuration conf; private Cipher cipher; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java index 3f5cd2d..6f78e83 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/CryptoCipherProvider.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.io.crypto; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.aes.CommonsCryptoAES; /** * The default cipher provider. Supports AES via the Commons Crypto. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class CryptoCipherProvider implements CipherProvider { private static CryptoCipherProvider instance; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java index d3029db..947e11a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Decryptor.java @@ -21,13 +21,11 @@ import java.io.InputStream; import java.security.Key; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Decryptors apply a cipher to an InputStream to recover plaintext. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface Decryptor { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java index 4f2aebe..9c82b2a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/DefaultCipherProvider.java @@ -19,14 +19,12 @@ package org.apache.hadoop.hbase.io.crypto; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.crypto.aes.AES; /** * The default cipher provider. Supports AES via the JCE. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class DefaultCipherProvider implements CipherProvider { private static DefaultCipherProvider instance; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java index b6c2e97..e8727a7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryption.java @@ -39,7 +39,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.util.ReflectionUtils; @@ -48,7 +47,6 @@ import org.apache.hadoop.util.ReflectionUtils; * A facade for encryption algorithms and related support. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class Encryption { private static final Log LOG = LogFactory.getLog(Encryption.class); @@ -57,7 +55,6 @@ public final class Encryption { * Crypto context */ @InterfaceAudience.Public - @InterfaceStability.Evolving public static class Context extends org.apache.hadoop.hbase.io.crypto.Context { /** The null crypto context */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java index cda703d..4e84a68 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/Encryptor.java @@ -21,13 +21,11 @@ import java.io.OutputStream; import java.security.Key; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Encryptors apply a cipher to an OutputStream to produce ciphertext. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface Encryptor { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java index 515a664..e0542e1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyProvider.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.io.crypto; import java.security.Key; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * KeyProvider is a interface to abstract the different methods of retrieving @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface KeyProvider { public static final String PASSWORD = "password"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java index 7e3c013..1f6c83a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java @@ -34,7 +34,6 @@ import java.util.Locale; import java.util.Properties; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A basic KeyProvider that can resolve keys from a protected KeyStore file @@ -71,7 +70,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * LoadStoreParameters. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class KeyStoreKeyProvider implements KeyProvider { protected KeyStore store; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java index d7535e5..1133b91 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java @@ -20,7 +20,6 @@ import java.io.IOException; import java.io.OutputStream; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.util.Bytes; * in the HBase mailing list to prevent collisions. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum DataBlockEncoding { /** Disable data block encoding. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java index 4cc636e..3ee8cfc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.net; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import com.google.common.net.HostAndPort; @@ -30,7 +29,6 @@ import com.google.common.net.HostAndPort; *

In implementation this class is a facade over Guava's {@link HostAndPort}. * We cannot have Guava classes in our API hence this Type. */ -@InterfaceStability.Evolving @InterfaceAudience.Public public class Address implements Comparable

{ private HostAndPort hostAndPort; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java index 9219c23..c3a6dc7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java @@ -25,14 +25,12 @@ import java.util.TreeSet; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.net.Address; /** * Stores the group information of region server groups. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RSGroupInfo { public static final String DEFAULT_GROUP = "default"; public static final String NAMESPACE_DESC_PROP_GROUP = "hbase.rsgroup.name"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java index be2a0d3..c7d65be 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/User.java @@ -31,7 +31,6 @@ import java.util.concurrent.ExecutionException; import com.google.common.cache.LoadingCache; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Methods; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.SecurityUtil; @@ -51,7 +50,6 @@ import org.apache.hadoop.security.token.TokenIdentifier; *

*/ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class User { public static final String HBASE_SECURITY_CONF_KEY = "hbase.security.authentication"; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java index cd43448..08f1112 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/DataType.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; *

*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface DataType { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java index 1caf518..cf662e7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/FixedLengthWrapper.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; * variant. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class FixedLengthWrapper implements DataType { protected final DataType base; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java index cd6f614..c40964f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlob.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeBlobCopy(PositionedByteRange, byte[], int, int, Order)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedBlob extends OrderedBytesBase { public static final OrderedBlob ASCENDING = new OrderedBlob(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java index 22a321b..a4ecb9a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBlobVar.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeBlobVar(PositionedByteRange, byte[], int, int, Order)}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedBlobVar extends OrderedBytesBase { public static final OrderedBlobVar ASCENDING = new OrderedBlobVar(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java index 682202d..fcc823d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedBytesBase.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * implementations. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class OrderedBytesBase implements DataType { protected final Order order; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java index a417f77..f1af97e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat32.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeFloat32(PositionedByteRange, float, Order)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedFloat32 extends OrderedBytesBase { public static final OrderedFloat32 ASCENDING = new OrderedFloat32(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java index 891b07c..a8036cd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedFloat64.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeFloat64(PositionedByteRange, double, Order)}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedFloat64 extends OrderedBytesBase { public static final OrderedFloat64 ASCENDING = new OrderedFloat64(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java index b968f5d..6930c86 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt16.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeInt16(PositionedByteRange, short, Order)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedInt16 extends OrderedBytesBase { public static final OrderedInt16 ASCENDING = new OrderedInt16(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java index a504f13..828bae9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt32.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeInt32(PositionedByteRange, int, Order)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedInt32 extends OrderedBytesBase { public static final OrderedInt32 ASCENDING = new OrderedInt32(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java index 3fb65e3..219911e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt64.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeInt64(PositionedByteRange, long, Order)}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedInt64 extends OrderedBytesBase { public static final OrderedInt64 ASCENDING = new OrderedInt64(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java index 50fcec0..3767b75 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedInt8.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeInt8(PositionedByteRange, byte, Order)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedInt8 extends OrderedBytesBase { public static final OrderedInt8 ASCENDING = new OrderedInt8(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java index db34cf5..9a37e71 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedNumeric.java @@ -21,7 +21,6 @@ import java.math.BigDecimal; import java.math.BigInteger; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; * from text. Built on {@link OrderedBytes#encodeNumeric(PositionedByteRange, BigDecimal, Order)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedNumeric extends OrderedBytesBase { public static final OrderedNumeric ASCENDING = new OrderedNumeric(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java index f7401bf..e5fcd5a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/OrderedString.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.OrderedBytes; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -28,7 +27,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * {@link OrderedBytes#encodeString(PositionedByteRange, String, Order)}. 
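The data-type javadocs in this stretch all describe the same encode/decode contract over a PositionedByteRange; a minimal round-trip sketch (the buffer size and value here are arbitrary) looks like this:

import org.apache.hadoop.hbase.types.OrderedString;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class OrderedStringRoundTrip {
  public static void main(String[] args) {
    // Scratch buffer for the encoded form.
    PositionedByteRange buf = new SimplePositionedMutableByteRange(64);

    // Write with the order-preserving string encoding, then rewind and read it back.
    OrderedString.ASCENDING.encode(buf, "row-0001");
    buf.setPosition(0);
    String decoded = OrderedString.ASCENDING.decode(buf);

    System.out.println(decoded); // prints row-0001
  }
}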
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedString extends OrderedBytesBase { public static final OrderedString ASCENDING = new OrderedString(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java index 3d545f6..2718e98 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/PBType.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import com.google.protobuf.Message; * {@code PBKeyValue} in {@code hbase-examples} module. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class PBType implements DataType { @Override public boolean isOrderPreserving() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java index 9f90350..fdaff10 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawByte.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see Bytes#putByte(byte[], int, byte) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawByte implements DataType { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java index bea3c5f..449edfc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytes.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see OrderedBlobVar */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawBytes implements DataType { public static final RawBytes ASCENDING = new RawBytes(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java index bfd6416..0333721 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesFixedLength.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import 
org.apache.hadoop.hbase.util.PositionedByteRange; @@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see OrderedBlobVar */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawBytesFixedLength extends FixedLengthWrapper { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java index 8bc4c20..1b67bd8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawBytesTerminated.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -32,7 +31,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see OrderedBlob */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawBytesTerminated extends TerminatedWrapper { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java index 776639c..c100ccd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawDouble.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see Bytes#toDouble(byte[]) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawDouble implements DataType { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java index 5c2f823..751e4ef 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawFloat.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see Bytes#toFloat(byte[]) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawFloat implements DataType { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java index 0d9e4eb..ab09322 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawInteger.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import 
org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see Bytes#toInt(byte[]) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawInteger implements DataType { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java index b8bbcd2..0da3bbd 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawLong.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see Bytes#toLong(byte[]) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawLong implements DataType { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java index bc1ef30..cb342e7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawShort.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see Bytes#toShort(byte[]) */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawShort implements DataType { @Override diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java index 7e3b350..b70e103 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawString.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -32,7 +31,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see RawStringTerminated */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawString implements DataType { public static final RawString ASCENDING = new RawString(Order.ASCENDING); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java index d11bead..24a394c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringFixedLength.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; 
-import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; /** @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.Order; * @see RawString */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawStringFixedLength extends FixedLengthWrapper { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java index 4d89d5b..408b57a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/RawStringTerminated.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; /** @@ -33,7 +32,6 @@ import org.apache.hadoop.hbase.util.Order; * @see OrderedString */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RawStringTerminated extends TerminatedWrapper { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java index 550088a..eea64d9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Struct.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.types; import java.util.Iterator; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -78,7 +77,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; * @see DataType#isNullable() */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class Struct implements DataType { @SuppressWarnings("rawtypes") diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java index d73a17d..ad4f021 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructBuilder.java @@ -21,13 +21,11 @@ import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A helper for building {@link Struct} instances. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class StructBuilder { protected final List> fields = new ArrayList<>(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java index 11808f4..5793527 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/StructIterator.java @@ -21,7 +21,6 @@ import java.util.Iterator; import java.util.NoSuchElementException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.PositionedByteRange; /** @@ -47,7 +46,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; *

*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class StructIterator implements Iterator { protected final PositionedByteRange src; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java index 7485f88..f05b2ef 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/TerminatedWrapper.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange; * {@code skippable} variant. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TerminatedWrapper implements DataType { protected final DataType wrapped; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java index 2ffc174..c4e6c6b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union2.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; */ @SuppressWarnings("unchecked") @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Union2 implements DataType { protected final DataType typeA; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java index 64afc5b..79c37d0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union3.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; */ @SuppressWarnings("unchecked") @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Union3 extends Union2 { protected final DataType typeC; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java index 1eb8529..387d8d5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/Union4.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.types; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Order; import org.apache.hadoop.hbase.util.PositionedByteRange; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.util.PositionedByteRange; 
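For orientation (a sketch, not part of the patch): Struct, StructBuilder and StructIterator above compose several DataType fields into one ordered byte encoding, typically for multi-part row keys. The snippet below assumes the fluent StructBuilder.add(...).toStruct() API described in its javadoc and the OrderedString/OrderedInt32 singletons from the same package:

    import org.apache.hadoop.hbase.types.OrderedInt32;
    import org.apache.hadoop.hbase.types.OrderedString;
    import org.apache.hadoop.hbase.types.Struct;
    import org.apache.hadoop.hbase.types.StructBuilder;
    import org.apache.hadoop.hbase.util.PositionedByteRange;
    import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

    public class StructExample {
      public static void main(String[] args) {
        // Compose a two-field key: an ordered string followed by an ordered int.
        Struct pair = new StructBuilder()
            .add(OrderedString.ASCENDING)
            .add(OrderedInt32.ASCENDING)
            .toStruct();
        PositionedByteRange buf = new SimplePositionedMutableByteRange(64);
        pair.encode(buf, new Object[] { "user-42", 7 });
        buf.setPosition(0);
        Object[] decoded = pair.decode(buf);  // { "user-42", 7 }
        System.out.println(decoded[0] + " / " + decoded[1]);
      }
    }

Because both field types preserve sort order, the concatenated encoding sorts the same way the (string, int) tuples do, which is the point of using Struct for row keys.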
*/ @SuppressWarnings("unchecked") @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class Union4 extends Union3 { protected final DataType typeD; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java index b825c0f..3f021eb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java @@ -41,7 +41,6 @@ import java.util.zip.GZIPOutputStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Encodes and decodes to and from Base64 notation. @@ -118,7 +117,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * version: 2.2.1 */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Base64 { /* ******** P U B L I C F I E L D S ******** */ @@ -1290,7 +1288,6 @@ public class Base64 { * @since 1.3 */ @InterfaceAudience.Public - @InterfaceStability.Stable public static class Base64InputStream extends FilterInputStream { private boolean encode; // Encoding or decoding private int position; // Current position in the buffer @@ -1492,7 +1489,6 @@ public class Base64 { * @since 1.3 */ @InterfaceAudience.Public - @InterfaceStability.Stable public static class Base64OutputStream extends FilterOutputStream { private boolean encode; private int position; @@ -1538,7 +1534,6 @@ public class Base64 { * @since 1.3 */ @InterfaceAudience.Public - @InterfaceStability.Stable public Base64OutputStream(OutputStream out, int options) { super(out); this.breakLines = (options & DONT_BREAK_LINES) != DONT_BREAK_LINES; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java index 760afd4..8d5120b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java @@ -28,7 +28,6 @@ import java.nio.ByteBuffer; import java.util.Arrays; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.ByteBufferWriter; import org.apache.hadoop.hbase.io.util.StreamUtils; import org.apache.hadoop.io.IOUtils; @@ -42,7 +41,6 @@ import sun.nio.ch.DirectBuffer; */ @SuppressWarnings("restriction") @InterfaceAudience.Public -@InterfaceStability.Evolving public final class ByteBufferUtils { // "Compressed integer" serialization helper constants. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRange.java index d547db1..ffb1418 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRange.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Lightweight, reusable class for specifying ranges of byte[]'s. @@ -60,7 +59,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; *
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface ByteRange extends Comparable { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java index 9248b41..672366d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java @@ -24,7 +24,6 @@ import java.util.ArrayList; import java.util.Collection; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import com.google.common.collect.Lists; @@ -32,7 +31,6 @@ import com.google.common.collect.Lists; * Utility methods for working with {@link ByteRange}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ByteRangeUtils { public static int numEqualPrefixBytes(ByteRange left, ByteRange right, int rightInnerOffset) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 626132b..704d97f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.WritableComparator; import org.apache.hadoop.io.WritableUtils; @@ -60,7 +59,6 @@ import com.google.protobuf.ByteString; */ @SuppressWarnings("restriction") @InterfaceAudience.Public -@InterfaceStability.Stable @edu.umd.cs.findbugs.annotations.SuppressWarnings( value="EQ_CHECK_FOR_OPERAND_NOT_COMPATIBLE_WITH_THIS", justification="It has been like this forever") @@ -335,7 +333,6 @@ public class Bytes implements Comparable { * Byte array comparator class. */ @InterfaceAudience.Public - @InterfaceStability.Stable public static class ByteArrayComparator implements RawComparator { /** * Constructor @@ -363,7 +360,6 @@ public class Bytes implements Comparable { // while comparing row keys, start keys etc; but as the largest value for comparing // region boundaries for endKeys. @InterfaceAudience.Public - @InterfaceStability.Stable public static class RowEndKeyComparator extends ByteArrayComparator { @Override public int compare(byte[] left, byte[] right) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java index 36ca7ad..f5788e9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Counter.java @@ -22,14 +22,12 @@ import java.util.concurrent.atomic.AtomicLongFieldUpdater; import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * High scalable counter. Thread safe. * @deprecated use {@link java.util.concurrent.atomic.LongAdder} instead. 
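As the javadoc above notes, Counter is deprecated in favor of java.util.concurrent.atomic.LongAdder. A rough before/after migration sketch (Counter's increment/add/get methods are assumed from its long-standing public API; this is not part of the patch):

    import java.util.concurrent.atomic.LongAdder;
    import org.apache.hadoop.hbase.util.Counter;

    public class CounterMigration {
      public static void main(String[] args) {
        // Deprecated HBase counter.
        Counter legacy = new Counter();
        legacy.increment();
        legacy.add(5);
        System.out.println(legacy.get());   // 6

        // JDK 8 replacement suggested by the @deprecated tag.
        LongAdder adder = new LongAdder();
        adder.increment();
        adder.add(5);
        System.out.println(adder.sum());    // 6
      }
    }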
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving @Deprecated public class Counter { private static final int MAX_CELLS_LENGTH = 1 << 20; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java index 1b2ddb0..29b0bb0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java @@ -26,14 +26,12 @@ import org.apache.commons.codec.binary.Hex; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Utility class for MD5 * MD5 hash produces a 128-bit digest. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class MD5Hash { private static final Log LOG = LogFactory.getLog(MD5Hash.class); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java index 9cbbe7e..14a08d3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Order.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Used to describe or modify the lexicographical sort order of a @@ -27,7 +26,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * by replacing each byte with its 1's compliment. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public enum Order { ASCENDING { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index a167562..3e4bc6c 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -27,7 +27,6 @@ import java.math.RoundingMode; import java.nio.charset.Charset; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import com.google.common.annotations.VisibleForTesting; @@ -267,7 +266,6 @@ import com.google.common.annotations.VisibleForTesting; *
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class OrderedBytes { /* diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java index 719d1ee..f42f63b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Pair.java @@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.util; import java.io.Serializable; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A generic class for pairs. @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * @param */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Pair implements Serializable { private static final long serialVersionUID = -3986244606585552569L; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java index b9d9da9..3d520a8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java @@ -23,7 +23,6 @@ import java.util.Iterator; import org.apache.commons.lang.NotImplementedException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A generic, immutable class for pairs of objects both of type T. @@ -31,7 +30,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * @see Pair if Types differ. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class PairOfSameType implements Iterable { private final T first; private final T second; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PositionedByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PositionedByteRange.java index 3c47d86..3643fd6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PositionedByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/PositionedByteRange.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** *
@@ -35,7 +34,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; *
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface PositionedByteRange extends ByteRange { // net new API is here. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReadOnlyByteRangeException.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReadOnlyByteRangeException.java index c14f1e2..2962c28 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReadOnlyByteRangeException.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReadOnlyByteRangeException.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception thrown when a read only byte range is modified */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ReadOnlyByteRangeException extends UnsupportedOperationException { public ReadOnlyByteRangeException() { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleByteRange.java index 4d5e5b5..3b157f8 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleByteRange.java @@ -18,13 +18,11 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A read only version of the {@link ByteRange}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class SimpleByteRange extends AbstractByteRange { public SimpleByteRange() { } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMutableByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMutableByteRange.java index d325649..3b32be0 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMutableByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimpleMutableByteRange.java @@ -51,13 +51,11 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A basic mutable {@link ByteRange} implementation. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class SimpleMutableByteRange extends AbstractByteRange { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedByteRange.java index 2265f24..2f4da00 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedByteRange.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Extends the basic {@link SimpleMutableByteRange} implementation with position @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * {@link #equals(Object)}. {@code Position} is retained by copy operations. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving @edu.umd.cs.findbugs.annotations.SuppressWarnings("EQ_DOESNT_OVERRIDE_EQUALS") public class SimplePositionedByteRange extends AbstractPositionedByteRange { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedMutableByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedMutableByteRange.java index b3614bb..5fe53db 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedMutableByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/SimplePositionedMutableByteRange.java @@ -51,7 +51,6 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Extends the basic {@link AbstractPositionedByteRange} implementation with @@ -61,7 +60,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; * {@link #equals(Object)}. {@code Position} is retained by copy operations. */ @InterfaceAudience.Public -@InterfaceStability.Evolving @edu.umd.cs.findbugs.annotations.SuppressWarnings("EQ_DOESNT_OVERRIDE_EQUALS") public class SimplePositionedMutableByteRange extends AbstractPositionedByteRange { /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java index 908689c..44bb89f 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java @@ -25,13 +25,11 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.Version; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * This class finds the Version information for HBase. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class VersionInfo { private static final Log LOG = LogFactory.getLog(VersionInfo.class.getName()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index fcad895..e191046 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -28,14 +28,12 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Common helpers for testing HBase that do not depend on specific server/etc. things. 
* {@see org.apache.hadoop.hbase.HBaseTestingUtility} */ @InterfaceAudience.Public -@InterfaceStability.Unstable public class HBaseCommonTestingUtility { protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class); diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java index 810778b..7760bdc 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java @@ -837,4 +837,4 @@ public class AggregationClient implements Closeable { } return bytes; } -} \ No newline at end of file +} diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.java index 30f3d30..82764b5 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.java @@ -33,7 +33,6 @@ import java.util.concurrent.CompletableFuture; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.RawAsyncTable; import org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallback; import org.apache.hadoop.hbase.client.RawScanResultConsumer; @@ -52,7 +51,6 @@ import org.apache.hadoop.hbase.util.ReflectionUtils; * summing/processing the individual results obtained from the AggregateService for each region. 
*/ @InterfaceAudience.Public -@InterfaceStability.Unstable public class AsyncAggregationClient { private static abstract class AbstractAggregationCallback diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java index ea8bc9c..3326f2f 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/Constants.java @@ -19,13 +19,11 @@ package org.apache.hadoop.hbase.rest; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Common constants for org.apache.hadoop.hbase.rest */ @InterfaceAudience.Public -@InterfaceStability.Stable public interface Constants { // All constants in a public interface are 'public static final' diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java index c9cf49a..4a89aa8 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java @@ -32,7 +32,6 @@ import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; @@ -53,7 +52,6 @@ import org.apache.http.util.EntityUtils; * semantics for interacting with the REST gateway. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class Client { public static final Header[] EMPTY_HEADER_ARRAY = new Header[0]; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Cluster.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Cluster.java index 549a1b2..6189ed6 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Cluster.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Cluster.java @@ -24,14 +24,12 @@ import java.util.Collections; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * A list of 'host:port' addresses of HTTP servers operating as a single * entity, for example multiple redundant web service gateways. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class Cluster { protected List nodes = Collections.synchronizedList(new ArrayList()); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteAdmin.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteAdmin.java index de1e23f..55eeba5 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteAdmin.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteAdmin.java @@ -31,7 +31,6 @@ import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HTableDescriptor; @@ -44,7 +43,6 @@ import org.apache.hadoop.hbase.rest.model.VersionModel; import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public -@InterfaceStability.Stable public class RemoteAdmin { final Client client; diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index 9cc3198..5012a5a 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -42,7 +42,6 @@ import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; @@ -79,7 +78,6 @@ import com.google.protobuf.ServiceException; * HTable interface to remote tables accessed via REST gateway */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RemoteHTable implements Table { private static final Log LOG = LogFactory.getLog(RemoteHTable.class); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java index 8998e57..86a92bf 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java @@ -25,7 +25,6 @@ import java.io.InputStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.http.Header; import org.apache.http.HttpResponse; @@ -33,7 +32,6 @@ import org.apache.http.HttpResponse; * The HTTP result code, response headers, and body of a HTTP response. 
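The rest.client classes above (Client, Cluster, RemoteAdmin, RemoteHTable, Response) are the Java-side wrappers for the REST gateway. A hedged sketch of typical usage, assuming the constructors shown in their javadocs; the gateway host and table name are placeholders, not values from this patch:

    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.rest.client.Client;
    import org.apache.hadoop.hbase.rest.client.Cluster;
    import org.apache.hadoop.hbase.rest.client.RemoteHTable;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RestClientExample {
      public static void main(String[] args) throws Exception {
        // Point the client at one or more REST gateway instances.
        Cluster cluster = new Cluster();
        cluster.add("rest-gateway.example.com", 8080);  // hypothetical host
        Client client = new Client(cluster);

        // RemoteHTable implements Table, so reads look like normal client code.
        RemoteHTable table = new RemoteHTable(client, "my_table");
        Result r = table.get(new Get(Bytes.toBytes("row1")));
        System.out.println(Bytes.toStringBinary(r.getRow()));
        table.close();
      }
    }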
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class Response { private static final Log LOG = LogFactory.getLog(Response.class); diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java index 7224383..013da3f 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java @@ -48,7 +48,6 @@ import org.apache.hadoop.conf.Configuration; * attempt as a bad request. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class RestCsrfPreventionFilter implements Filter { private static final Log LOG = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java index 255ca31..b04e685 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java @@ -27,7 +27,6 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; @@ -58,7 +57,6 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class LocalHBaseCluster { private static final Log LOG = LogFactory.getLog(LocalHBaseCluster.class); private final List masterThreads = new CopyOnWriteArrayList<>(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java index 49a718c..42d40eb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/TableSnapshotScanner.java @@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.snapshot.RestoreSnapshotHelper; import org.apache.hadoop.hbase.util.FSUtils; @@ -65,7 +64,6 @@ import org.apache.hadoop.hbase.util.FSUtils; * @see org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TableSnapshotScanner extends AbstractClientScanner { private static final Log LOG = LogFactory.getLog(TableSnapshotScanner.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java index c141c3e..e1dc7f9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/locking/EntityLock.java @@ -28,7 +28,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Abortable; import 
org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse; @@ -81,7 +80,6 @@ import com.google.common.annotations.VisibleForTesting; * */ @InterfaceAudience.Public -@InterfaceStability.Stable public class EntityLock { private static final Log LOG = LogFactory.getLog(EntityLock.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java index a00ccd9..bff7e8f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java @@ -22,7 +22,6 @@ import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage; import org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage; import org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage; @@ -41,7 +40,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.ErrorHandlingProtos.Sta * ProxyThrowables are generated are by this class's {@link #deserialize(byte[])} method. */ @InterfaceAudience.Public -@InterfaceStability.Evolving @SuppressWarnings("serial") public class ForeignException extends IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutException.java index 746c59b..bd2adf3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutException.java @@ -18,14 +18,12 @@ package org.apache.hadoop.hbase.errorhandling; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Exception for timeout of a task. 
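TableSnapshotScanner, touched a few hunks above, scans a snapshot directly from the filesystem without going through the region servers. A sketch under the assumption of the (Configuration, restoreDir, snapshotName, Scan) constructor referenced in its javadoc; the snapshot name and restore path are placeholders:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.client.TableSnapshotScanner;

    public class SnapshotScanExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Scratch directory (on the same filesystem as the HBase root dir) used to
        // restore the snapshot's file references before scanning; placeholder path.
        Path restoreDir = new Path("/tmp/snapshot-restore");
        Scan scan = new Scan();
        try (TableSnapshotScanner scanner =
            new TableSnapshotScanner(conf, restoreDir, "my_snapshot", scan)) {
          for (Result result : scanner) {
            System.out.println(result);
          }
        }
      }
    }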
* @see TimeoutExceptionInjector */ @InterfaceAudience.Public -@InterfaceStability.Evolving @SuppressWarnings("serial") public class TimeoutException extends Exception { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java index e1ca999..a534224 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.KeyValue; @@ -39,7 +38,6 @@ import org.apache.hadoop.mapred.Reporter; * Extract grouping columns from input record */ @InterfaceAudience.Public -@InterfaceStability.Stable public class GroupingTableMap extends MapReduceBase implements TableMap { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java index be131e8..0011a60 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java @@ -25,7 +25,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionLocator; @@ -44,7 +43,6 @@ import org.apache.hadoop.mapred.Partitioner; * @param */ @InterfaceAudience.Public -@InterfaceStability.Stable public class HRegionPartitioner implements Partitioner { private static final Log LOG = LogFactory.getLog(HRegionPartitioner.class); @@ -95,4 +93,4 @@ implements Partitioner { // if above fails to find start key that match we need to return something return 0; } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java index 2f5a9b8..dfacff9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.mapred.JobConf; @@ -33,7 +32,6 @@ import org.apache.hadoop.mapred.Reporter; * Pass the given key and record as-is to reduce */ @InterfaceAudience.Public -@InterfaceStability.Stable public class IdentityTableMap extends MapReduceBase implements TableMap { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java index 8d4d301..9c2e604 
100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java @@ -24,7 +24,6 @@ import java.util.Iterator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.mapred.MapReduceBase; @@ -35,7 +34,6 @@ import org.apache.hadoop.mapred.Reporter; * Write to table each key, record pair */ @InterfaceAudience.Public -@InterfaceStability.Stable public class IdentityTableReduce extends MapReduceBase implements TableReduce { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/MultiTableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/MultiTableSnapshotInputFormat.java index ab27edd..3e121fe 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/MultiTableSnapshotInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/MultiTableSnapshotInputFormat.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapred; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -83,7 +82,6 @@ import java.util.Map; * @see org.apache.hadoop.hbase.client.TableSnapshotScanner */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MultiTableSnapshotInputFormat extends TableSnapshotInputFormat implements InputFormat { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java index fd9a60c..43560fd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -41,7 +40,6 @@ import org.apache.hadoop.util.ToolRunner; * Uses a org.apache.hadoop.mapred.lib.IdentityReducer */ @InterfaceAudience.Public -@InterfaceStability.Stable public class RowCounter extends Configured implements Tool { // Name of this 'program' static final String NAME = "rowcounter"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java index 29763f3..208849a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import 
org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; @@ -38,7 +37,6 @@ import org.apache.hadoop.util.StringUtils; * Convert HBase tabular data into a format that is consumable by Map/Reduce. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableInputFormat extends TableInputFormatBase implements JobConfigurable { private static final Log LOG = LogFactory.getLog(TableInputFormat.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java index ccd8417..c65810f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java @@ -24,7 +24,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Connection; @@ -78,7 +77,6 @@ import org.apache.hadoop.mapred.Reporter; */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class TableInputFormatBase implements InputFormat { private static final Log LOG = LogFactory.getLog(TableInputFormatBase.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMap.java index 3a57bc3..a9f1e61 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMap.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapred; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.io.WritableComparable; @@ -33,7 +32,6 @@ import org.apache.hadoop.mapred.Mapper; * @param Writable value class */ @InterfaceAudience.Public -@InterfaceStability.Stable public interface TableMap, V> extends Mapper { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java index 73340c4..63ec418 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java @@ -23,7 +23,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; @@ -49,7 +48,6 @@ import java.util.Map; * Utility for {@link TableMap} and {@link TableReduce} */ @InterfaceAudience.Public -@InterfaceStability.Stable @SuppressWarnings({ "rawtypes", "unchecked" }) public class TableMapReduceUtil { diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java index 3fe5a90..eb4b66f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java @@ -24,7 +24,6 @@ import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.BufferedMutator; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -41,7 +40,6 @@ import org.apache.hadoop.util.Progressable; * Convert Map/Reduce output and write it to an HBase table */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableOutputFormat extends FileOutputFormat { /** JobConf parameter that specifies the output table */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java index 281d13e..cecef7d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.Filter; @@ -33,7 +32,6 @@ import org.apache.hadoop.mapred.RecordReader; * Iterate over an HBase table data, return (Text, RowResult) pairs */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableRecordReader implements RecordReader { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java index 2ace8e2..f6b79c3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -43,7 +42,6 @@ import static org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.LOG_PER_RO * Iterate over an HBase table data, return (Text, RowResult) pairs */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableRecordReaderImpl { private static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableReduce.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableReduce.java index 2ca7cf8..91fb4a1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableReduce.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableReduce.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapred; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.io.WritableComparable; @@ -32,7 +31,6 @@ import org.apache.hadoop.mapred.Reducer; * @param value class */ @InterfaceAudience.Public -@InterfaceStability.Stable @SuppressWarnings("unchecked") public interface TableReduce extends Reducer { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSnapshotInputFormat.java index a5c62b2..d7b49ff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSnapshotInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSnapshotInputFormat.java @@ -22,7 +22,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -45,7 +44,6 @@ import java.util.List; * @see org.apache.hadoop.hbase.mapreduce.TableSnapshotInputFormat */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TableSnapshotInputFormat implements InputFormat { public static class TableSnapshotRegionSplit implements InputSplit { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSplit.java index 237fe47..0784e5e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSplit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableSplit.java @@ -24,7 +24,6 @@ import java.io.IOException; import java.util.Arrays; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.util.Bytes; @@ -34,7 +33,6 @@ import org.apache.hadoop.mapred.InputSplit; * A table split corresponds to a key range [low, high) */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableSplit implements InputSplit, Comparable { private TableName m_tableName; private byte [] m_startRow; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java index d39efaa..9a8911e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java @@ -24,7 +24,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -71,7 +70,6 @@ import 
com.google.common.base.Preconditions; * start time and/or end time to limit the count to a time range. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class CellCounter extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(CellCounter.class.getName()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCreator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCreator.java index 001f64d..1d4d37b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCreator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCreator.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; @@ -32,7 +31,6 @@ import org.apache.hadoop.util.ReflectionUtils; * Facade to create Cells for HFileOutputFormat. The created Cells are of Put type. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class CellCreator { public static final String VISIBILITY_EXP_RESOLVER_CLASS = diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java index 8f0504a..c0d809b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java @@ -32,7 +32,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -48,7 +47,6 @@ import org.apache.hadoop.util.ToolRunner; * of the region server implementation if different from the local cluster. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class CopyTable extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(CopyTable.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java index 3a3988e..4c01528 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; @@ -50,7 +49,6 @@ import org.apache.hadoop.util.ToolRunner; * back in again. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class Export extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(Export.class); final static String NAME = "export"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java index 44e43c8..dc30c6e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.util.ArrayList; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; @@ -38,7 +37,6 @@ import org.apache.hadoop.mapreduce.Job; * Extract grouping columns from input record. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class GroupingTableMapper extends TableMapper implements Configurable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java index 1ce5f60..5b1f13c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java @@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; @@ -92,7 +91,6 @@ import com.google.common.annotations.VisibleForTesting; * using {@link #configureIncrementalLoad(Job, HTableDescriptor, RegionLocator, Class)}. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class HFileOutputFormat2 extends FileOutputFormat { private static final Log LOG = LogFactory.getLog(HFileOutputFormat2.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java index 4c75b56..3475a48 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -50,7 +49,6 @@ import org.apache.hadoop.mapreduce.Partitioner; * @param The type of the value. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class HRegionPartitioner extends Partitioner implements Configurable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java index fdf351e..7103ef8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -31,7 +30,6 @@ import org.apache.hadoop.mapreduce.Job; * Pass the given key and record as-is to the reduce phase. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class IdentityTableMapper extends TableMapper { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java index ec3192e..5289f46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.io.Writable; @@ -50,7 +49,6 @@ import org.apache.hadoop.io.Writable; * row and columns implicitly. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class IdentityTableReducer extends TableReducer { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java index d1beb8d..3c72c2b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java @@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -84,7 +83,6 @@ import org.apache.zookeeper.KeeperException; * Import data written by {@link Export}. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class Import extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(Import.class); final static String NAME = "import"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java index a379d53..aa7b129 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java @@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotEnabledException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; @@ -74,7 +73,6 @@ import com.google.common.collect.Lists; * @see ImportTsv#usage(String) */ @InterfaceAudience.Public -@InterfaceStability.Stable public class ImportTsv extends Configured implements Tool { protected static final Log LOG = LogFactory.getLog(ImportTsv.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java index e85ef58..241608b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSerialization.java @@ -26,13 +26,11 @@ import java.io.OutputStream; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.Serializer; @InterfaceAudience.Public -@InterfaceStability.Evolving public class KeyValueSerialization implements Serialization { @Override public boolean accept(Class c) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java index d37ab94..5c7ace2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapreduce; import java.util.TreeSet; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -35,7 +34,6 @@ import org.apache.hadoop.mapreduce.Reducer; * @see HFileOutputFormat2 */ @InterfaceAudience.Public -@InterfaceStability.Stable public class KeyValueSortReducer extends Reducer { protected void reduce(ImmutableBytesWritable row, java.lang.Iterable kvs, org.apache.hadoop.mapreduce.Reducer.Context context) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java index 80dfd66..19daeed 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java @@ -67,7 +67,6 @@ import org.apache.hadoop.hbase.backup.BackupType; import org.apache.hadoop.hbase.backup.impl.BackupManager; import org.apache.hadoop.hbase.backup.impl.BackupSystemTable; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ClientServiceCallable; import org.apache.hadoop.hbase.client.Connection; @@ -108,7 +107,6 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; * Tool to load the output of HFileOutputFormat into an existing table. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class LoadIncrementalHFiles extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(LoadIncrementalHFiles.class); private boolean initalized = false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiHFileOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiHFileOutputFormat.java index dc2fc0d..3c90b59 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiHFileOutputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiHFileOutputFormat.java @@ -16,7 +16,6 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; @@ -44,7 +43,6 @@ import com.google.common.annotations.VisibleForTesting; *
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving @VisibleForTesting public class MultiHFileOutputFormat extends FileOutputFormat { private static final Log LOG = LogFactory.getLog(MultiHFileOutputFormat.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java index 3099c0d..a8e6837 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java @@ -22,7 +22,6 @@ import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Scan; @@ -55,7 +54,6 @@ import org.apache.hadoop.hbase.client.Scan; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MultiTableInputFormat extends MultiTableInputFormatBase implements Configurable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java index 25ea047..e18b3aa 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java @@ -25,7 +25,6 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; @@ -54,7 +53,6 @@ import java.util.Iterator; * filters etc. Subclasses may use other TableRecordReader implementations. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public abstract class MultiTableInputFormatBase extends InputFormat { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java index 7feb7a9..4cc784f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java @@ -25,7 +25,6 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableName; @@ -61,7 +60,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; *
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class MultiTableOutputFormat extends OutputFormat { /** Set this to {@link #WAL_OFF} to turn off write-ahead logging (WAL) */ public static final String WAL_PROPERTY = "hbase.mapreduce.multitableoutputformat.wal"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java index bd530c8..6ba8138 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableSnapshotInputFormat.java @@ -22,7 +22,6 @@ import com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; @@ -78,7 +77,6 @@ import java.util.Map; * @see org.apache.hadoop.hbase.client.TableSnapshotScanner */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MultiTableSnapshotInputFormat extends TableSnapshotInputFormat { private final MultiTableSnapshotInputFormatImpl delegate; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java index fef275b..8997da9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MutationSerialization.java @@ -22,7 +22,6 @@ import java.io.InputStream; import java.io.OutputStream; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; @@ -34,7 +33,6 @@ import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.Serializer; @InterfaceAudience.Public -@InterfaceStability.Evolving public class MutationSerialization implements Serialization { @Override public boolean accept(Class c) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java index 1a0cfdb..f01e84f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutCombiner.java @@ -26,7 +26,6 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; @@ -39,7 +38,6 @@ import org.apache.hadoop.mapreduce.Reducer; * @see TableMapReduceUtil */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class PutCombiner extends Reducer { private static final Log LOG = LogFactory.getLog(PutCombiner.class); diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java index b48580d..17ab9cb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java @@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.TagUtil; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -52,7 +51,6 @@ import org.apache.hadoop.util.StringUtils; * @see KeyValueSortReducer */ @InterfaceAudience.Public -@InterfaceStability.Stable public class PutSortReducer extends Reducer { // the cell creator diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java index 98c92ea..dff04b6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/ResultSerialization.java @@ -32,7 +32,6 @@ import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; @@ -42,7 +41,6 @@ import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.Serializer; @InterfaceAudience.Public -@InterfaceStability.Evolving public class ResultSerialization extends Configured implements Serialization { private static final Log LOG = LogFactory.getLog(ResultSerialization.class); // The following configuration property indicates import file format version. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java index 46d29eb..2e0591e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java @@ -27,7 +27,6 @@ import org.apache.commons.logging.LogFactory; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -49,7 +48,6 @@ import org.apache.hadoop.util.ToolRunner; * input row has columns that have content. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class RowCounter extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(RowCounter.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java index 2257054..4ba1088 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapreduce; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; @@ -45,7 +44,6 @@ import org.apache.hadoop.mapreduce.Partitioner; * @see #END */ @InterfaceAudience.Public -@InterfaceStability.Stable public class SimpleTotalOrderPartitioner extends Partitioner implements Configurable { private final static Log LOG = LogFactory.getLog(SimpleTotalOrderPartitioner.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java index 4f6b307..63868da 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java @@ -30,7 +30,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.RegionLocator; @@ -46,7 +45,6 @@ import org.apache.hadoop.util.StringUtils; * Convert HBase tabular data into a format that is consumable by Map/Reduce. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableInputFormat extends TableInputFormatBase implements Configurable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java index 7962a42..ce1928e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java @@ -32,7 +32,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.TableName; @@ -97,7 +96,6 @@ import org.apache.hadoop.util.StringUtils; * */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class TableInputFormatBase extends InputFormat { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java index 69b486d..e6a69ac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java @@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Put; @@ -69,7 +68,6 @@ import com.codahale.metrics.MetricRegistry; */ @SuppressWarnings({ "rawtypes", "unchecked" }) @InterfaceAudience.Public -@InterfaceStability.Stable public class TableMapReduceUtil { private static final Log LOG = LogFactory.getLog(TableMapReduceUtil.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java index cde94fe..9a7dcb7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapreduce; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.mapreduce.Mapper; @@ -33,8 +32,7 @@ import org.apache.hadoop.mapreduce.Mapper; * @see org.apache.hadoop.mapreduce.Mapper */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class TableMapper extends Mapper { -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java index 9ccde9a..749fd85 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.TaskAttemptContext; @@ -30,7 +29,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; * Small committer class that does not do anything. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableOutputCommitter extends OutputCommitter { @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java index 3b5ef8b..615999f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java @@ -23,7 +23,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -50,7 +49,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; * {@link Delete} instance. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableOutputFormat extends OutputFormat implements Configurable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java index 2b480fe..f66520b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; @@ -35,7 +34,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; * pairs. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableRecordReader extends RecordReader { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java index a8ed5f1..e646727 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java @@ -24,7 +24,6 @@ import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; @@ -47,7 +46,6 @@ import com.google.common.annotations.VisibleForTesting; * pairs. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class TableRecordReaderImpl { public static final String LOG_PER_ROW_COUNT = "hbase.mapreduce.log.scanner.rowcount"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java index d09311a..f0bfc74 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.mapreduce; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.mapreduce.Reducer; @@ -41,7 +40,6 @@ import org.apache.hadoop.mapreduce.Reducer; * @see org.apache.hadoop.mapreduce.Reducer */ @InterfaceAudience.Public -@InterfaceStability.Stable public abstract class TableReducer extends Reducer { } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java index b2db319..15d403f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSnapshotInputFormat.java @@ -23,7 +23,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; @@ -82,7 +81,6 @@ import java.util.List; * @see org.apache.hadoop.hbase.client.TableSnapshotScanner */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TableSnapshotInputFormat extends InputFormat { public static class TableSnapshotRegionSplit extends InputSplit implements Writable { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java index 1795909..13c7c67 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java @@ -26,7 +26,6 @@ import java.util.Arrays; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Scan; @@ -40,7 +39,6 @@ import org.apache.hadoop.mapreduce.InputSplit; * All references to row below refer to the key of the row. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TableSplit extends InputSplit implements Writable, Comparable { /** @deprecated LOG variable would be made private. fix in hbase 3.0 */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java index 05a4820..84324e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java @@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.security.visibility.InvalidLabelException; import org.apache.hadoop.hbase.util.Base64; @@ -50,7 +49,6 @@ import org.apache.hadoop.util.StringUtils; * @see PutSortReducer */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TextSortReducer extends Reducer { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java index 08b5aab..a9d8e03 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.BadTsvLineException; @@ -45,7 +44,6 @@ import org.apache.hadoop.mapreduce.Mapper; * Write table content out to files in hdfs. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class TsvImporterMapper extends Mapper { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java index 7744ea7..581f0d0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterTextMapper.java @@ -25,7 +25,6 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import java.io.IOException; @@ -34,7 +33,6 @@ import java.io.IOException; * Write table content out to map output files. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TsvImporterTextMapper extends Mapper { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/VisibilityExpressionResolver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/VisibilityExpressionResolver.java index 48d7708..a83a88f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/VisibilityExpressionResolver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/VisibilityExpressionResolver.java @@ -21,7 +21,6 @@ import java.io.IOException; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hbase.Tag; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.Tag; * Interface to convert visibility expressions into Tags for storing along with Cells in HFiles. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface VisibilityExpressionResolver extends Configurable { /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java index d16dcf5..1e04275 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.hadoop.hbase.client.Delete; @@ -66,7 +65,6 @@ import org.apache.hadoop.util.ToolRunner; * in that case the WAL is replayed for a single table only. 
*/ @InterfaceAudience.Public -@InterfaceStability.Stable public class WALPlayer extends Configured implements Tool { private static final Log LOG = LogFactory.getLog(WALPlayer.class); final static String NAME = "WALPlayer"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java index 2892cb2..c76d02a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobConstants.java @@ -23,14 +23,12 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.util.Bytes; /** * The constants used in mob. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class MobConstants { public static final String MOB_SCAN_RAW = "hbase.mob.scan.raw"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowTooBigException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowTooBigException.java index 4a408e7..a4411e4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowTooBigException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RowTooBigException.java @@ -35,4 +35,4 @@ public class RowTooBigException extends org.apache.hadoop.hbase.client.RowTooBig public RowTooBigException(String message) { super(message); } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java index 1d42450..6127d5b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java @@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; @@ -50,7 +49,6 @@ import org.apache.zookeeper.KeeperException; * Utility methods for obtaining authentication tokens. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class TokenUtil { // This class is referenced indirectly by User out in common; instances are created by reflection private static final Log LOG = LogFactory.getLog(TokenUtil.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java index e2425a6..6808545 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.security.visibility; import java.util.List; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hbase.security.User; @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.security.User; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface ScanLabelGenerator extends Configurable { /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java index a720127..8366fce 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.security.visibility; import java.io.IOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Cell; /** @@ -30,7 +29,6 @@ import org.apache.hadoop.hbase.Cell; * read results. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface VisibilityExpEvaluator { /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelService.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelService.java index 8ddd47e..5c8f0cc 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelService.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityLabelService.java @@ -24,7 +24,6 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hbase.Tag; import org.apache.hadoop.hbase.TagType; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.regionserver.OperationStatus; import org.apache.hadoop.hbase.security.User; @@ -34,7 +33,6 @@ import org.apache.hadoop.hbase.security.User; * visibility expression storage part and read time evaluation. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public interface VisibilityLabelService extends Configurable { /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java index efae7e4..e2086e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java @@ -36,7 +36,6 @@ import org.apache.commons.cli.Option; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -87,7 +86,6 @@ import org.apache.hadoop.util.Tool; * When everything is done, the second cluster can restore the snapshot. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class ExportSnapshot extends AbstractHBaseTool implements Tool { public static final String NAME = "exportsnapshot"; /** Configuration prefix for overrides for the source filesystem */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java index 6dbd3f0..eb96438 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java @@ -41,7 +41,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.SnapshotDescription; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.TableName; @@ -66,7 +65,6 @@ import org.apache.hadoop.hbase.util.FSUtils; * */ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class SnapshotInfo extends AbstractHBaseTool { private static final Log LOG = LogFactory.getLog(SnapshotInfo.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java index efb3170..47cb394 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ConfigurationUtil.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.util; import com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.util.StringUtils; import java.util.AbstractMap; @@ -34,7 +33,6 @@ import java.util.Map; * {@link org.apache.hadoop.conf.Configuration} instances. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public final class ConfigurationUtil { // TODO: hopefully this is a good delimiter; it's not in the base64 alphabet, // nor is it valid for paths diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java index 6692ee8..1efab58 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/EncryptionTest.java @@ -27,7 +27,6 @@ import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.io.crypto.DefaultCipherProvider; @@ -36,7 +35,6 @@ import org.apache.hadoop.hbase.io.crypto.KeyStoreKeyProvider; import org.apache.hadoop.hbase.security.EncryptionUtil; @InterfaceAudience.Public -@InterfaceStability.Evolving public class EncryptionTest { private static final Log LOG = LogFactory.getLog(EncryptionTest.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/LeaseNotRecoveredException.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/LeaseNotRecoveredException.java index ca769b8..152cd8e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/LeaseNotRecoveredException.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/LeaseNotRecoveredException.java @@ -20,14 +20,12 @@ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; /** * Thrown when the lease was expected to be recovered, * but the file can't be opened. */ @InterfaceAudience.Public -@InterfaceStability.Stable public class LeaseNotRecoveredException extends HBaseIOException { public LeaseNotRecoveredException() { super(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java index 7dea269..c6f22ff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java @@ -35,7 +35,6 @@ import java.util.Random; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; import org.apache.zookeeper.server.NIOServerCnxnFactory; @@ -50,7 +49,6 @@ import com.google.common.annotations.VisibleForTesting; * easily access testing helper objects. 
*/ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MiniZooKeeperCluster { private static final Log LOG = LogFactory.getLog(MiniZooKeeperCluster.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index 696ea18..82c2eab 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -61,7 +61,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Waiter.ExplainingPredicate; import org.apache.hadoop.hbase.Waiter.Predicate; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.BufferedMutator; import org.apache.hadoop.hbase.client.Connection; @@ -157,7 +156,6 @@ import edu.umd.cs.findbugs.annotations.Nullable; * setting it to true. */ @InterfaceAudience.Public -@InterfaceStability.Evolving @SuppressWarnings("deprecation") public class HBaseTestingUtility extends HBaseCommonTestingUtility { private MiniZooKeeperCluster zkCluster = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java index 55529c6..c0efc7b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java @@ -26,7 +26,6 @@ import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hbase.master.HMaster; @@ -51,7 +50,6 @@ import org.apache.hadoop.hbase.util.Threads; * each and will close down their instance on the way out. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class MiniHBaseCluster extends HBaseCluster { private static final Log LOG = LogFactory.getLog(MiniHBaseCluster.class.getName()); public LocalHBaseCluster hbaseCluster; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java index c1b99f2..6e48ba8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java @@ -27,7 +27,6 @@ import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; -import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.KeyValue; @@ -42,7 +41,6 @@ import org.apache.hadoop.hbase.util.Bytes; * Do basic codec performance eval. */ @InterfaceAudience.Public -@InterfaceStability.Evolving public class CodecPerformance { /** @deprecated LOG variable would be made private. since 1.2, remove in 3.0 */ @Deprecated -- 2.7.4
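
For context (illustrative only, not part of the patch): every hunk above follows the same pattern, an IA.Public class drops its InterfaceStability marker plus the matching import, and nothing else in the declaration changes. The sketch below assumes the mapreduce TableMapper API touched above; the class RowKeyCountMapper and its member names are invented for the example. It is only meant to show that downstream user code compiles the same before and after this patch, since marker annotations alone are removed.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

// Illustrative sketch, not part of this patch: a user-defined mapper built on the
// IA.Public TableMapper edited above. It emits (row key, 1) for every scanned row.
// Nothing here refers to InterfaceStability, so the annotation removal is invisible
// to code like this.
public class RowKeyCountMapper extends TableMapper<Text, IntWritable> {

  private static final IntWritable ONE = new IntWritable(1);
  private final Text outKey = new Text();

  @Override
  protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
      throws IOException, InterruptedException {
    // TableMapper fixes the input types to (ImmutableBytesWritable, Result);
    // forward the row key as text with a count of one.
    outKey.set(Bytes.toString(rowKey.get(), rowKey.getOffset(), rowKey.getLength()));
    context.write(outKey, ONE);
  }
}

A job would wire such a mapper up through TableMapReduceUtil.initTableMapperJob(...) exactly as before; the only difference visible to consumers of the public classes above is the missing stability annotation.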