Pair<HRegionInfo, ServerName> pair = getRegion(nameOfRegionA);
if (pair != null && pair.getFirst().getReplicaId() != HRegionInfo.DEFAULT_REPLICA_ID)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
index f37690ca70..3d3ad33589 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java
@@ -23,6 +23,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
@@ -124,8 +125,7 @@ class HConnectionKey {
for (String property : CONNECTION_PROPERTIES) {
String thisValue = this.properties.get(property);
String thatValue = that.properties.get(property);
- //noinspection StringEquality
- if (thisValue == thatValue) {
+ if (Objects.equals(thisValue, thatValue)) {
continue;
}
if (thisValue == null || !thisValue.equals(thatValue)) {
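
The hunk above swaps an intentional reference comparison (previously suppressed with //noinspection StringEquality) for java.util.Objects.equals, which is null-safe and falls back to equals() for content comparison. A minimal standalone sketch of the difference (not part of the patch):

    import java.util.Objects;

    public class NullSafeEquals {
      public static void main(String[] args) {
        String a = null, b = null;
        System.out.println(a == b);                    // true, but only by identity
        System.out.println(Objects.equals(a, b));      // true: both null
        System.out.println(Objects.equals("x", null)); // false, and no NPE
        System.out.println(Objects.equals(new String("x"), "x")); // true by content
      }
    }
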
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index e9531f32a4..5fb9e63e4b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -1808,6 +1808,7 @@ public class HTable implements HTableInterface, RegionLocator {
return getKeysAndRegionsInRange(start, end, true).getFirst();
}
+ @Override
public void setOperationTimeout(int operationTimeout) {
this.operationTimeout = operationTimeout;
if (mutator != null) {
@@ -1816,6 +1817,7 @@ public class HTable implements HTableInterface, RegionLocator {
multiAp.setOperationTimeout(operationTimeout);
}
+ @Override
public int getOperationTimeout() {
return operationTimeout;
}
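
Most hunks in this patch only add @Override to methods that already override or implement a supertype method. The annotation is behavior-neutral, but it turns an accidental overload (for example, a drifted parameter type) into a compile-time error. A standalone sketch with hypothetical class names:

    public class OverrideDemo {
      static class Base {
        public void setOperationTimeout(int timeout) { }
      }

      static class Sub extends Base {
        @Override
        public void setOperationTimeout(int timeout) { } // compiles: a true override
        // Annotating a (long timeout) variant with @Override would be rejected
        // by the compiler, because that signature is an overload, not an override.
      }

      public static void main(String[] args) {
        new Sub().setOperationTimeout(1000);
      }
    }
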
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
index 7b2b1368e7..21e3ce67a1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java
@@ -468,7 +468,7 @@ public class HTableMultiplexer {
}
public long getTotalBufferedCount() {
- return queue.size() + currentProcessingCount.get();
+ return (long) queue.size() + currentProcessingCount.get();
}
public AtomicAverageCounter getAverageLatencyCounter() {
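
In the hunk above, queue.size() + currentProcessingCount.get() adds two ints and only then widens the sum to long, so the addition itself can overflow; casting one operand to long first forces 64-bit arithmetic. A standalone sketch (not part of the patch):

    public class WideningDemo {
      public static void main(String[] args) {
        int a = Integer.MAX_VALUE, b = 1;
        long wrong = a + b;        // int addition overflows first: -2147483648
        long right = (long) a + b; // 64-bit addition: 2147483648
        System.out.println(wrong + " vs " + right);
      }
    }

The same reasoning applies to the HEAP_OVERHEAD change in Increment.java below.
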
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
index 502703bde1..d2193067b7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
@@ -308,6 +308,7 @@ public class HTablePool implements Closeable {
*
* Note: this is a 'shutdown' of all the table pools.
*/
+ @Override
public void close() throws IOException {
for (String tableName : tables.keySet()) {
closeTablePool(tableName);
@@ -524,6 +525,7 @@ public class HTablePool implements Closeable {
*
* @throws IOException
*/
+ @Override
public void close() throws IOException {
checkState();
open = false;
@@ -635,7 +637,8 @@ public class HTablePool implements Closeable {
private void checkState() {
if (!isOpen()) {
- throw new IllegalStateException("Table=" + new String(table.getTableName()) + " already closed");
+ throw new IllegalStateException("Table=" + table.getName().getNameAsString()
+ + " already closed");
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
index 48f2fd604e..76759b763c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.ClassSize;
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Increment extends Mutation implements Comparable<Row> {
- private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE;
+ private static final long HEAP_OVERHEAD = (long) ClassSize.REFERENCE + ClassSize.TIMERANGE;
private TimeRange tr = new TimeRange();
/**
@@ -164,6 +164,7 @@ public class Increment extends Mutation implements Comparable<Row> {
* client that is not interested in the result can save network bandwidth setting this
* to false.
*/
+ @Override
public Increment setReturnResults(boolean returnResults) {
super.setReturnResults(returnResults);
return this;
@@ -173,6 +174,7 @@ public class Increment extends Mutation implements Comparable<Row> {
* @return current setting for returnResults
*/
// This method makes public the superclasses's protected method.
+ @Override
public boolean isReturnResults() {
return super.isReturnResults();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
index 126b1175a8..d4a2d85fcc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
@InterfaceStability.Stable
public class NoServerForRegionException extends DoNotRetryRegionException {
- private static final long serialVersionUID = 1L << 11 - 1L;
+ private static final long serialVersionUID = 1L << (11 - 1L);
/** default constructor */
public NoServerForRegionException() {
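
Because '-' binds tighter than '<<', the old expression 1L << 11 - 1L already parsed as 1L << (11 - 1L); the patch spells the grouping out rather than changing the value (the same applies to RegionServerRunningException further down). A standalone sketch:

    public class ShiftPrecedence {
      public static void main(String[] args) {
        System.out.println(1L << 11 - 1L);   // 1024: parsed as 1L << (11 - 1)
        System.out.println(1L << (11 - 1L)); // 1024: same value, explicit intent
        System.out.println((1L << 11) - 1L); // 2047: what a reader might assume
      }
    }
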
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
index 7ac4546f64..64b44a9a90 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java
@@ -38,10 +38,12 @@ public class PerClientRandomNonceGenerator implements NonceGenerator {
this.clientId = (((long)Arrays.hashCode(clientIdBase)) << 32) + rdm.nextInt();
}
+ @Override
public long getNonceGroup() {
return this.clientId;
}
+ @Override
public long newNonce() {
long result = HConstants.NO_NONCE;
do {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
index d7aa73900d..789e2e1447 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java
@@ -38,7 +38,7 @@ class RegistryFactory {
ZooKeeperRegistry.class.getName());
Registry registry = null;
try {
- registry = (Registry)Class.forName(registryClass).newInstance();
+ registry = (Registry)Class.forName(registryClass).getDeclaredConstructor().newInstance();
} catch (Throwable t) {
throw new IOException(t);
}
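
Class.newInstance() is deprecated since Java 9, largely because it rethrows any checked exception from the no-arg constructor without declaring or wrapping it; Constructor.newInstance() wraps such failures in InvocationTargetException. Here both paths funnel into the catch block anyway, so the swap mainly removes the deprecation warning. A standalone sketch of the replacement idiom (not part of the patch):

    public class ReflectiveCreate {
      public static Object create(String className) throws ReflectiveOperationException {
        // Constructor exceptions surface as InvocationTargetException instead of
        // being rethrown undeclared, as Class.newInstance() used to do.
        return Class.forName(className).getDeclaredConstructor().newInstance();
      }

      public static void main(String[] args) throws ReflectiveOperationException {
        System.out.println(create("java.util.ArrayList")); // prints []
      }
    }
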
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
index b03595a2d6..af24d5c0ca 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java
@@ -66,6 +66,7 @@ public class RpcRetryingCaller<T> {
private final long pauseForCQTBE;
private final int retries;
private final int rpcTimeout;// timeout for each rpc request
+ private final Object lock = new Object();
private final AtomicBoolean cancelled = new AtomicBoolean(false);
private final RetryingCallerInterceptor interceptor;
private final RetryingCallerInterceptorContext context;
@@ -105,16 +106,16 @@ public class RpcRetryingCaller<T> {
private int getTimeout(int callTimeout){
int timeout = getRemainingTime(callTimeout);
- if (timeout <= 0 || rpcTimeout > 0 && rpcTimeout < timeout){
+ if (timeout <= 0 || (rpcTimeout > 0 && rpcTimeout < timeout)){
timeout = rpcTimeout;
}
return timeout;
}
public void cancel(){
- synchronized (cancelled){
+ synchronized (lock){
cancelled.set(true);
- cancelled.notifyAll();
+ lock.notifyAll();
}
}
@@ -181,9 +182,9 @@ public class RpcRetryingCaller {
}
try {
if (expectedSleep > 0) {
- synchronized (cancelled) {
+ synchronized (lock) {
if (cancelled.get()) return null;
- cancelled.wait(expectedSleep);
+ lock.wait(expectedSleep);
}
}
if (cancelled.get()) return null;
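
The hunks above stop synchronizing and waiting on the AtomicBoolean itself, a pattern static analyzers flag because it mixes intrinsic monitors with a java.util.concurrent primitive, and introduce a dedicated lock object for wait/notify while keeping the AtomicBoolean as a pure flag. A simplified standalone sketch of the resulting cancellable-sleep shape (class and method names are illustrative, not the patch's API):

    import java.util.concurrent.atomic.AtomicBoolean;

    public class CancellableSleep {
      private final Object lock = new Object();
      private final AtomicBoolean cancelled = new AtomicBoolean(false);

      public void cancel() {
        synchronized (lock) {
          cancelled.set(true);
          lock.notifyAll(); // wake any thread parked in sleep() below
        }
      }

      /** Returns false if cancelled, true otherwise (ignoring spurious wakeups). */
      public boolean sleep(long millis) throws InterruptedException {
        synchronized (lock) {
          if (cancelled.get()) return false;
          lock.wait(millis); // cancel() cuts this short via notifyAll()
          return !cancelled.get();
        }
      }

      public static void main(String[] args) throws InterruptedException {
        CancellableSleep s = new CancellableSleep();
        new Thread(s::cancel).start();
        System.out.println("completed=" + s.sleep(5_000)); // almost surely false
      }
    }
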
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 1340602358..541b3d8f1d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -871,6 +871,7 @@ public class Scan extends Query {
return allowPartialResults;
}
+ @Override
public Scan setLoadColumnFamiliesOnDemand(boolean value) {
return (Scan) super.setLoadColumnFamiliesOnDemand(value);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
index 3ea3802d7e..5e1d16261a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.util.Bytes;
public class LongColumnInterpreter extends ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg> {
+ @Override
public Long getValue(byte[] colFamily, byte[] colQualifier, Cell kv)
throws IOException {
if (kv == null || kv.getValueLength() != Bytes.SIZEOF_LONG)
@@ -49,7 +50,7 @@ public class LongColumnInterpreter extends ColumnInterpreter<Long, Long, EmptyMsg, LongMsg, LongMsg>
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
-        Class.forName(filter).newInstance();
+        Class<?> clazz = Class.forName(filter);
+ Object o = clazz.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new ReplicationException("Configured WALEntryFilter " + filter +
" could not be created. Failing add/update " + "peer operation.", e);
@@ -783,12 +784,12 @@ public class ReplicationAdmin implements Closeable {
* @see java.lang.Object#equals(java.lang.Object)
*/
private boolean compareForReplication(HTableDescriptor peerHtd, HTableDescriptor localHtd) {
- if (peerHtd == localHtd) {
- return true;
- }
if (peerHtd == null) {
return false;
}
+ if (peerHtd.equals(localHtd)) {
+ return true;
+ }
boolean result = false;
// Create a copy of peer HTD as we need to change its replication
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
index 1847b2ecd7..5fc97ba7b7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java
@@ -34,7 +34,7 @@ public enum SecurityCapability {
CELL_AUTHORIZATION(3),
CELL_VISIBILITY(4);
- private int value;
+ private final int value;
public int getValue() {
return value;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
index d5c2613f5f..09fe7e1ed5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
@@ -50,6 +50,7 @@ public class BinaryComparator extends ByteArrayComparable {
/**
* @return The comparator serialized using pb
*/
+ @Override
public byte [] toByteArray() {
ComparatorProtos.BinaryComparator.Builder builder =
ComparatorProtos.BinaryComparator.newBuilder();
@@ -79,6 +80,7 @@ public class BinaryComparator extends ByteArrayComparable {
* @return true if and only if the fields of the comparator that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (!(other instanceof BinaryComparator)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
index c05eb8f42f..366b2f9c0b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
@@ -52,6 +52,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
/**
* @return The comparator serialized using pb
*/
+ @Override
public byte [] toByteArray() {
ComparatorProtos.BinaryPrefixComparator.Builder builder =
ComparatorProtos.BinaryPrefixComparator.newBuilder();
@@ -81,6 +82,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
* @return true if and only if the fields of the comparator that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (!(other instanceof BinaryPrefixComparator)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
index 0b7c52db9b..07af22f91a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
@@ -66,6 +66,7 @@ public class BitComparator extends ByteArrayComparable {
/**
* @return The comparator serialized using pb
*/
+ @Override
public byte [] toByteArray() {
ComparatorProtos.BitComparator.Builder builder =
ComparatorProtos.BitComparator.newBuilder();
@@ -99,6 +100,7 @@ public class BitComparator extends ByteArrayComparable {
* @return true if and only if the fields of the comparator that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (!(other instanceof BitComparator)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 18f49f6d45..d2f6ec84d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -84,6 +84,7 @@ public class ColumnCountGetFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.ColumnCountGetFilter.Builder builder =
FilterProtos.ColumnCountGetFilter.newBuilder();
@@ -113,6 +114,7 @@ public class ColumnCountGetFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof ColumnCountGetFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 6f297fbdc0..806863af07 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -174,6 +174,7 @@ public class ColumnPaginationFilter extends FilterBase
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.ColumnPaginationFilter.Builder builder =
FilterProtos.ColumnPaginationFilter.newBuilder();
@@ -213,6 +214,7 @@ public class ColumnPaginationFilter extends FilterBase
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof ColumnPaginationFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index 6a9e6e9799..eb0be8d62c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -99,6 +99,7 @@ public class ColumnPrefixFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.ColumnPrefixFilter.Builder builder =
FilterProtos.ColumnPrefixFilter.newBuilder();
@@ -128,6 +129,7 @@ public class ColumnPrefixFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof ColumnPrefixFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 9e6c90f923..9006f87dfc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -143,8 +143,8 @@ public class ColumnRangeFilter extends FilterBase {
int cmpMax = Bytes.compareTo(buffer, qualifierOffset, qualifierLength,
this.maxColumn, 0, this.maxColumn.length);
- if (this.maxColumnInclusive && cmpMax <= 0 ||
- !this.maxColumnInclusive && cmpMax < 0) {
+ if ((this.maxColumnInclusive && cmpMax <= 0) ||
+ (!this.maxColumnInclusive && cmpMax < 0)) {
return ReturnCode.INCLUDE;
}
@@ -177,6 +177,7 @@ public class ColumnRangeFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.ColumnRangeFilter.Builder builder =
FilterProtos.ColumnRangeFilter.newBuilder();
@@ -211,6 +212,7 @@ public class ColumnRangeFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof ColumnRangeFilter)) return false;
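
As with several other hunks in this patch, the parentheses added above are purely documentary: '&&' already binds tighter than '||', so the old and new conditions are equivalent. A standalone sketch:

    public class AndOrPrecedence {
      public static void main(String[] args) {
        boolean inclusive = false;
        int cmp = -1;
        boolean bare = inclusive && cmp <= 0 || !inclusive && cmp < 0;
        boolean grouped = (inclusive && cmp <= 0) || (!inclusive && cmp < 0);
        System.out.println(bare == grouped); // true for every input combination
      }
    }
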
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index 9987e23b42..b7595d5641 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -170,6 +170,7 @@ public abstract class CompareFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof CompareFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index 2843751db8..8582e38b1f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -217,6 +217,7 @@ public class DependentColumnFilter extends CompareFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.DependentColumnFilter.Builder builder =
FilterProtos.DependentColumnFilter.newBuilder();
@@ -268,6 +269,7 @@ public class DependentColumnFilter extends CompareFilter {
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof DependentColumnFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index f9722d3d89..9aa078c4bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -82,6 +82,7 @@ public class FamilyFilter extends CompareFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.FamilyFilter.Builder builder =
FilterProtos.FamilyFilter.newBuilder();
@@ -121,6 +122,7 @@ public class FamilyFilter extends CompareFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof FamilyFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
index 3c6bcabd2b..812252cc7c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
@@ -142,6 +142,7 @@ public abstract class FilterBase extends Filter {
*
* {@inheritDoc}
*/
+ @Override
public Cell getNextCellHint(Cell currentKV) throws IOException {
// Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue).
// Thus to maintain compatibility we need to call the old version.
@@ -154,6 +155,7 @@ public abstract class FilterBase extends Filter {
*
* {@inheritDoc}
*/
+ @Override
public boolean isFamilyEssential(byte[] name) throws IOException {
return true;
}
@@ -171,6 +173,7 @@ public abstract class FilterBase extends Filter {
/**
* Return filter's info for debugging and logging purpose.
*/
+ @Override
public String toString() {
return this.getClass().getSimpleName();
}
@@ -178,6 +181,7 @@ public abstract class FilterBase extends Filter {
/**
* Return length 0 byte array for Filters that don't require special serialization
*/
+ @Override
public byte[] toByteArray() throws IOException {
return new byte[0];
}
@@ -189,6 +193,7 @@ public abstract class FilterBase extends Filter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter other) {
return true;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 0b39b56193..8345fcfdcd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -221,6 +221,7 @@ final public class FilterList extends Filter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() throws IOException {
FilterProtos.FilterList.Builder builder = FilterProtos.FilterList.newBuilder();
builder.setOperator(FilterProtos.FilterList.Operator.valueOf(operator.name()));
@@ -262,6 +263,7 @@ final public class FilterList extends Filter {
* @return true if and only if the fields of the filter that are serialized are equal to the
* corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter other) {
if (other == this) return true;
if (!(other instanceof FilterList)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
index 441852740d..71370ec1f1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java
@@ -54,6 +54,7 @@ final public class FilterWrapper extends Filter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() throws IOException {
FilterProtos.FilterWrapper.Builder builder =
FilterProtos.FilterWrapper.newBuilder();
@@ -181,6 +182,7 @@ final public class FilterWrapper extends Filter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof FilterWrapper)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index dafb485957..d18a1f8aaf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -42,6 +42,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
public FirstKeyOnlyFilter() {
}
+ @Override
public void reset() {
foundKV = false;
}
@@ -84,6 +85,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.FirstKeyOnlyFilter.Builder builder =
FilterProtos.FirstKeyOnlyFilter.newBuilder();
@@ -113,6 +115,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof FirstKeyOnlyFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index fc40982a1d..2f1c037f25 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -82,6 +82,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder =
FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder();
@@ -118,6 +119,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof FirstKeyValueMatchingQualifiersFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index 88fc17badc..d93d2342b8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -258,6 +258,7 @@ public class FuzzyRowFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() {
FilterProtos.FuzzyRowFilter.Builder builder = FilterProtos.FuzzyRowFilter.newBuilder();
for (Pair<byte[], byte[]> fuzzyData : fuzzyKeysData) {
@@ -466,45 +467,55 @@ public class FuzzyRowFilter extends FilterBase {
/** Abstracts directional comparisons based on scan direction. */
private enum Order {
ASC {
+ @Override
public boolean lt(int lhs, int rhs) {
return lhs < rhs;
}
+ @Override
public boolean gt(int lhs, int rhs) {
return lhs > rhs;
}
+ @Override
public byte inc(byte val) {
// TODO: what about over/underflow?
return (byte) (val + 1);
}
+ @Override
public boolean isMax(byte val) {
return val == (byte) 0xff;
}
+ @Override
public byte min() {
return 0;
}
},
DESC {
+ @Override
public boolean lt(int lhs, int rhs) {
return lhs > rhs;
}
+ @Override
public boolean gt(int lhs, int rhs) {
return lhs < rhs;
}
+ @Override
public byte inc(byte val) {
// TODO: what about over/underflow?
return (byte) (val - 1);
}
+ @Override
public boolean isMax(byte val) {
return val == 0;
}
+ @Override
public byte min() {
return (byte) 0xFF;
}
@@ -627,6 +638,7 @@ public class FuzzyRowFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized are equal to the
* corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof FuzzyRowFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index a16e48bcf1..671e596f05 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -65,6 +65,7 @@ public class InclusiveStopFilter extends FilterBase {
return v;
}
+ @Override
public boolean filterRowKey(byte[] buffer, int offset, int length) {
if (buffer == null) {
//noinspection RedundantIfStatement
@@ -81,6 +82,7 @@ public class InclusiveStopFilter extends FilterBase {
return done;
}
+ @Override
public boolean filterAllRemaining() {
return done;
}
@@ -95,6 +97,7 @@ public class InclusiveStopFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.InclusiveStopFilter.Builder builder =
FilterProtos.InclusiveStopFilter.newBuilder();
@@ -124,6 +127,7 @@ public class InclusiveStopFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof InclusiveStopFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index cebb26ac88..3895b2a82a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -88,6 +88,7 @@ public class KeyOnlyFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.KeyOnlyFilter.Builder builder =
FilterProtos.KeyOnlyFilter.newBuilder();
@@ -117,6 +118,7 @@ public class KeyOnlyFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof KeyOnlyFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index 7e9503c22b..fd21dbd465 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -141,6 +141,7 @@ public class MultiRowRangeFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() {
FilterProtos.MultiRowRangeFilter.Builder builder = FilterProtos.MultiRowRangeFilter
.newBuilder();
@@ -193,6 +194,7 @@ public class MultiRowRangeFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized are equal to the
* corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this)
return true;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index d3eb642cc7..96a0b389a2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -115,6 +115,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.MultipleColumnPrefixFilter.Builder builder =
FilterProtos.MultipleColumnPrefixFilter.newBuilder();
@@ -152,6 +153,7 @@ public class MultipleColumnPrefixFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof MultipleColumnPrefixFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index a9b3c8e77e..d2f7f45c0f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -62,6 +62,7 @@ public class NullComparator extends ByteArrayComparable {
/**
* @return The comparator serialized using pb
*/
+ @Override
public byte [] toByteArray() {
ComparatorProtos.NullComparator.Builder builder =
ComparatorProtos.NullComparator.newBuilder();
@@ -90,6 +91,7 @@ public class NullComparator extends ByteArrayComparable {
* @return true if and only if the fields of the comparator that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (!(other instanceof NullComparator)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index ee94d4c672..9692f2287e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -24,11 +24,11 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import java.io.IOException;
import java.util.ArrayList;
+
/**
* Implementation of Filter interface that limits results to a specific page
* size. It terminates scanning once the number of filter-passed rows is >
@@ -72,15 +72,18 @@ public class PageFilter extends FilterBase {
return v;
}
+ @Override
public boolean filterAllRemaining() {
return this.rowsAccepted >= this.pageSize;
}
+ @Override
public boolean filterRow() {
this.rowsAccepted++;
return this.rowsAccepted > this.pageSize;
}
+ @Override
public boolean hasFilterRow() {
return true;
}
@@ -95,6 +98,7 @@ public class PageFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.PageFilter.Builder builder =
FilterProtos.PageFilter.newBuilder();
@@ -124,6 +128,7 @@ public class PageFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof PageFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index 4ba675b863..cdb611c86f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -30,6 +30,7 @@ import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EmptyStackException;
@@ -263,7 +264,7 @@ public class ParseFilter {
e.printStackTrace();
}
throw new IllegalArgumentException("Incorrect filter string " +
- new String(filterStringAsByteArray));
+ new String(filterStringAsByteArray, StandardCharsets.UTF_8));
}
/**
@@ -811,9 +812,9 @@ public class ParseFilter {
else if (Bytes.equals(comparatorType, ParseConstants.binaryPrefixType))
return new BinaryPrefixComparator(comparatorValue);
else if (Bytes.equals(comparatorType, ParseConstants.regexStringType))
- return new RegexStringComparator(new String(comparatorValue));
+ return new RegexStringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
else if (Bytes.equals(comparatorType, ParseConstants.substringType))
- return new SubstringComparator(new String(comparatorValue));
+ return new SubstringComparator(new String(comparatorValue, StandardCharsets.UTF_8));
else
throw new IllegalArgumentException("Incorrect comparatorType");
}
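
new String(byte[]) decodes with the JVM's default charset, so the result can differ between platforms; passing StandardCharsets.UTF_8 pins the decoding (and silences the corresponding default-encoding warning). A standalone sketch (not part of the patch):

    import java.nio.charset.StandardCharsets;

    public class CharsetPin {
      public static void main(String[] args) {
        byte[] raw = "caf\u00e9".getBytes(StandardCharsets.UTF_8);
        String implicit = new String(raw); // depends on the JVM's file.encoding
        String pinned = new String(raw, StandardCharsets.UTF_8); // always "café"
        System.out.println(implicit.equals(pinned)); // true only on UTF-8 platforms
      }
    }
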
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 8030ff60ca..a3fc440023 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -50,6 +50,7 @@ public class PrefixFilter extends FilterBase {
return prefix;
}
+ @Override
public boolean filterRowKey(byte[] buffer, int offset, int length) {
if (buffer == null || this.prefix == null)
return true;
@@ -80,14 +81,17 @@ public class PrefixFilter extends FilterBase {
return v;
}
+ @Override
public boolean filterRow() {
return filterRow;
}
+ @Override
public void reset() {
filterRow = true;
}
+ @Override
public boolean filterAllRemaining() {
return passedPrefix;
}
@@ -102,6 +106,7 @@ public class PrefixFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.PrefixFilter.Builder builder =
FilterProtos.PrefixFilter.newBuilder();
@@ -131,6 +136,7 @@ public class PrefixFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof PrefixFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index bf3a5f997b..bf503c2b05 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -81,6 +81,7 @@ public class QualifierFilter extends CompareFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.QualifierFilter.Builder builder =
FilterProtos.QualifierFilter.newBuilder();
@@ -120,6 +121,7 @@ public class QualifierFilter extends CompareFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof QualifierFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index 243923f0e8..f6d091ffc5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -90,7 +90,8 @@ public class RandomRowFilter extends FilterBase {
public boolean filterRow() {
return filterOutRow;
}
-
+
+ @Override
public boolean hasFilterRow() {
return true;
}
@@ -118,6 +119,7 @@ public class RandomRowFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.RandomRowFilter.Builder builder =
FilterProtos.RandomRowFilter.newBuilder();
@@ -147,6 +149,7 @@ public class RandomRowFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof RandomRowFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index 23a1e5d268..ee32a927c1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -97,6 +97,7 @@ public class RowFilter extends CompareFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.RowFilter.Builder builder =
FilterProtos.RowFilter.newBuilder();
@@ -136,6 +137,7 @@ public class RowFilter extends CompareFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof RowFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index 5c8668b28b..7dc0387046 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -97,6 +97,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
}
// We cleaned result row in FilterRow to be consistent with scanning process.
+ @Override
public boolean hasFilterRow() {
return true;
}
@@ -132,6 +133,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.SingleColumnValueExcludeFilter.Builder builder =
FilterProtos.SingleColumnValueExcludeFilter.newBuilder();
@@ -175,6 +177,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof SingleColumnValueExcludeFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 7dad1a4b10..0cf3e2f47c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -216,16 +216,19 @@ public class SingleColumnValueFilter extends FilterBase {
}
}
+ @Override
public boolean filterRow() {
// If column was found, return false if it was matched, true if it was not
// If column not found, return true if we filter if missing, false if not
return this.foundColumn? !this.matchedColumn: this.filterIfMissing;
}
+ @Override
public boolean hasFilterRow() {
return true;
}
+ @Override
public void reset() {
foundColumn = false;
matchedColumn = false;
@@ -325,6 +328,7 @@ public class SingleColumnValueFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
return convert().toByteArray();
}
@@ -364,6 +368,7 @@ public class SingleColumnValueFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof SingleColumnValueFilter)) return false;
@@ -382,6 +387,7 @@ public class SingleColumnValueFilter extends FilterBase {
* column in whole scan. If filterIfMissing == false, all families are essential,
* because of possibility of skipping the rows without any data in filtered CF.
*/
+ @Override
public boolean isFamilyEssential(byte[] name) {
return !this.filterIfMissing || Bytes.equals(name, this.columnFamily);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index dd06995c7c..9bc18a45b4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -87,10 +87,12 @@ public class SkipFilter extends FilterBase {
return filter.transformCell(v);
}
+ @Override
public boolean filterRow() {
return filterRow;
}
+ @Override
public boolean hasFilterRow() {
return true;
}
@@ -98,6 +100,7 @@ public class SkipFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() throws IOException {
FilterProtos.SkipFilter.Builder builder =
FilterProtos.SkipFilter.newBuilder();
@@ -131,6 +134,7 @@ public class SkipFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof SkipFilter)) return false;
@@ -139,6 +143,7 @@ public class SkipFilter extends FilterBase {
return getFilter().areSerializedFieldsEqual(other.getFilter());
}
+ @Override
public boolean isFamilyEssential(byte[] name) throws IOException {
return filter.isFamilyEssential(name);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
index 1f0043c0c5..6c872f37f9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
@@ -71,6 +71,7 @@ public class SubstringComparator extends ByteArrayComparable {
/**
* @return The comparator serialized using pb
*/
+ @Override
public byte [] toByteArray() {
ComparatorProtos.SubstringComparator.Builder builder =
ComparatorProtos.SubstringComparator.newBuilder();
@@ -100,6 +101,7 @@ public class SubstringComparator extends ByteArrayComparable {
* @return true if and only if the fields of the comparator that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
if (other == this) return true;
if (!(other instanceof SubstringComparator)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index be5a0f6fe9..f28560bfd6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -124,6 +124,7 @@ public class TimestampsFilter extends FilterBase {
*
* @throws IOException This will never happen.
*/
+ @Override
public Cell getNextCellHint(Cell currentCell) throws IOException {
if (!canHint) {
return null;
@@ -168,6 +169,7 @@ public class TimestampsFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() {
FilterProtos.TimestampsFilter.Builder builder =
FilterProtos.TimestampsFilter.newBuilder();
@@ -199,6 +201,7 @@ public class TimestampsFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof TimestampsFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index 5a46d7a11e..952d64ec86 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -77,6 +77,7 @@ public class ValueFilter extends CompareFilter {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte [] toByteArray() {
FilterProtos.ValueFilter.Builder builder =
FilterProtos.ValueFilter.newBuilder();
@@ -116,6 +117,7 @@ public class ValueFilter extends CompareFilter {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof ValueFilter)) return false;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index 7263e1bfcd..1cefe46e90 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -52,6 +52,7 @@ public class WhileMatchFilter extends FilterBase {
return filter;
}
+ @Override
public void reset() throws IOException {
this.filter.reset();
}
@@ -99,6 +100,7 @@ public class WhileMatchFilter extends FilterBase {
/**
* @return The filter serialized using pb
*/
+ @Override
public byte[] toByteArray() throws IOException {
FilterProtos.WhileMatchFilter.Builder builder =
FilterProtos.WhileMatchFilter.newBuilder();
@@ -132,6 +134,7 @@ public class WhileMatchFilter extends FilterBase {
* @return true if and only if the fields of the filter that are serialized
* are equal to the corresponding fields in other. Used for testing.
*/
+ @Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) return true;
if (!(o instanceof WhileMatchFilter)) return false;
@@ -140,6 +143,7 @@ public class WhileMatchFilter extends FilterBase {
return getFilter().areSerializedFieldsEqual(other.getFilter());
}
+ @Override
public boolean isFamilyEssential(byte[] name) throws IOException {
return filter.isFamilyEssential(name);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index caa19b8423..ebbf9e0304 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -242,7 +242,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcClient {
return null;
}
try {
- return (Codec) Class.forName(className).newInstance();
+ return (Codec) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException("Failed getting codec " + className, e);
}
@@ -270,7 +270,7 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcClient {
return null;
}
try {
- return (CompressionCodec) Class.forName(className).newInstance();
+ return (CompressionCodec) Class.forName(className).getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException("Failed getting compressor " + className, e);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
index d27602e7de..4a83fdd999 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java
@@ -67,6 +67,7 @@ public class BlockingRpcClient extends AbstractRpcClient<BlockingRpcConnection> {
* Creates a connection. Can be overridden by a subclass for testing.
* @param remoteId - the ConnectionId to use for the connection creation.
*/
+ @Override
protected BlockingRpcConnection createConnection(ConnectionId remoteId) throws IOException {
return new BlockingRpcConnection(this, remoteId);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
index 33fc880a8c..43986ddcef 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
@@ -57,20 +57,31 @@ public class ConnectionId {
}
@Override
- public boolean equals(Object obj) {
- if (obj instanceof ConnectionId) {
- ConnectionId id = (ConnectionId) obj;
- return address.equals(id.address) &&
- ((ticket != null && ticket.equals(id.ticket)) ||
- (ticket == id.ticket)) &&
- this.serviceName == id.serviceName;
- }
- return false;
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((address == null) ? 0 : address.hashCode());
+ result = prime * result + ((serviceName == null) ? 0 : serviceName.hashCode());
+ result = prime * result + ((ticket == null) ? 0 : ticket.hashCode());
+ return result;
}
- @Override // simply use the default Object#hashcode() ?
- public int hashCode() {
- return hashCode(ticket,serviceName,address);
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null) return false;
+ if (getClass() != obj.getClass()) return false;
+ ConnectionId other = (ConnectionId) obj;
+ if (address == null) {
+ if (other.address != null) return false;
+ } else if (!address.equals(other.address)) return false;
+ if (serviceName == null) {
+ if (other.serviceName != null) return false;
+ } else if (!serviceName.equals(other.serviceName)) return false;
+ if (ticket == null) {
+ if (other.ticket != null) return false;
+ } else if (!ticket.equals(other.ticket)) return false;
+ return true;
}
public static int hashCode(User ticket, String serviceName, InetSocketAddress address){
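
The rewrite above replaces the override questioned by '// simply use the default Object#hashcode() ?' with a hashCode derived from exactly the three fields that equals now compares, restoring the contract that hash-based collections depend on: equal objects must report equal hash codes. A standalone sketch of what breaks otherwise (hypothetical Key class, not the patch's API):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Objects;

    public class ContractDemo {
      static final class Key {
        final String name;
        Key(String name) { this.name = name; }
        @Override public boolean equals(Object o) {
          return o instanceof Key && Objects.equals(name, ((Key) o).name);
        }
        // Remove this hashCode and the lookup below will almost always return
        // null: equal keys would land in different buckets.
        @Override public int hashCode() { return Objects.hashCode(name); }
      }

      public static void main(String[] args) {
        Map<Key, String> m = new HashMap<>();
        m.put(new Key("conn"), "cached");
        System.out.println(m.get(new Key("conn"))); // prints "cached"
      }
    }
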
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
index 5b8498db77..305c7e0a3d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java
@@ -446,10 +446,9 @@ public final class ResponseConverter {
public static Map<String, Long> getScanMetrics(ScanResponse response) {
Map<String, Long> metricMap = new HashMap<String, Long>();
- if (response == null || !response.hasScanMetrics() || response.getScanMetrics() == null) {
+ if (response == null || !response.hasScanMetrics()) {
return metricMap;
- }
-
+ }
ScanMetrics metrics = response.getScanMetrics();
int numberOfMetrics = metrics.getMetricsCount();
for (int i = 0; i < numberOfMetrics; i++) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
index 70e43561e0..7b6b5463e9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
@@ -67,6 +67,7 @@ public final class QuotaRetriever implements Closeable, Iterable
}
}
+ @Override
public void close() throws IOException {
if (this.table != null) {
this.table.close();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
index a4a9720514..6eeb46beb9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class RegionServerRunningException extends IOException {
- private static final long serialVersionUID = 1L << 31 - 1L;
+ private static final long serialVersionUID = 1L << (31 - 1L);
/** Default Constructor */
public RegionServerRunningException() {
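The serialVersionUID hunk above is behavior-preserving: '-' binds tighter than '<<', so the old 1L << 31 - 1L always evaluated as 1L << 30, and the added parentheses only make the value already on the wire explicit (changing it would break compatibility with previously serialized instances). A quick demonstration:

    public class ShiftPrecedence {
      public static void main(String[] args) {
        long implicit = 1L << 31 - 1L;        // parsed as 1L << 30
        long explicit = 1L << (31 - 1L);      // same value, intent made visible
        long different = (1L << 31) - 1L;     // what a reader might have assumed
        System.out.println(implicit == explicit);   // true
        System.out.println(implicit == different);  // false
      }
    }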
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index c2999ecf0c..6fefb367c9 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
@@ -39,14 +39,12 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
import org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.KeeperException.NoNodeException;
/**
* This class provides an implementation of the ReplicationPeers interface using Zookeeper. The
@@ -80,14 +78,12 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re
// Map of peer clusters keyed by their id
private Map peerClusters;
private final ReplicationQueuesClient queuesClient;
- private Abortable abortable;
private static final Log LOG = LogFactory.getLog(ReplicationPeersZKImpl.class);
public ReplicationPeersZKImpl(final ZooKeeperWatcher zk, final Configuration conf,
final ReplicationQueuesClient queuesClient, Abortable abortable) {
super(zk, conf, abortable);
- this.abortable = abortable;
this.peerClusters = new ConcurrentHashMap();
this.queuesClient = queuesClient;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
index f9f2d43cd0..8e4871db36 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java
@@ -102,6 +102,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when a new node has been created.
* @param path full path of the new node
*/
+ @Override
public void nodeCreated(String path) {
refreshListIfRightPath(path);
}
@@ -110,6 +111,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when a node has been deleted
* @param path full path of the deleted node
*/
+ @Override
public void nodeDeleted(String path) {
if (stopper.isStopped()) {
return;
@@ -128,6 +130,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when an existing node has a child node added or removed.
* @param path full path of the node whose children have changed
*/
+ @Override
public void nodeChildrenChanged(String path) {
if (stopper.isStopped()) {
return;
@@ -159,6 +162,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when a node has been deleted
* @param path full path of the deleted node
*/
+ @Override
public void nodeDeleted(String path) {
List peers = refreshPeersList(path);
if (peers == null) {
@@ -177,6 +181,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements
* Called when an existing node has a child node added or removed.
* @param path full path of the node whose children have changed
*/
+ @Override
public void nodeChildrenChanged(String path) {
List peers = refreshPeersList(path);
if (peers == null) {
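The @Override annotations added here (and throughout this patch) are not cosmetic: they ask the compiler to verify that a method really overrides or implements something, which catches signature drift and typos that would otherwise leave a dead method behind. A small sketch of the failure mode, with an illustrative listener interface:

    interface Listener {
      void nodeCreated(String path);
    }

    class GoodListener implements Listener {
      @Override
      public void nodeCreated(String path) { /* checked by the compiler */ }
    }

    class BadListener implements Listener {
      @Override
      public void nodeCreated(String path) { }

      // Without @Override the compiler accepts this misspelling as a
      // brand-new method; annotating it would be a compile-time error.
      public void nodeCrated(String path) { /* never called */ }
    }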
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index 54c1701f5c..b26dcac89d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.security;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.TreeMap;
@@ -67,15 +68,15 @@ public class SaslUtil {
}
static String encodeIdentifier(byte[] identifier) {
- return new String(Base64.encodeBase64(identifier));
+ return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
}
static byte[] decodeIdentifier(String identifier) {
- return Base64.decodeBase64(identifier.getBytes());
+ return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
}
static char[] encodePassword(byte[] password) {
- return new String(Base64.encodeBase64(password)).toCharArray();
+ return new String(Base64.encodeBase64(password), StandardCharsets.UTF_8).toCharArray();
}
/**
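The SaslUtil change pins the charset for every String/byte[] conversion. Base64 text is pure ASCII, so any fixed charset round-trips it identically; the problem with the no-argument forms is that they use the JVM's default charset, which varies with the environment. A minimal sketch of the difference:

    import java.nio.charset.StandardCharsets;

    public class CharsetPinning {
      public static void main(String[] args) {
        byte[] data = "säsl-token".getBytes(StandardCharsets.UTF_8);
        // new String(data) would decode with file.encoding, which differs
        // across JVMs (e.g. ISO-8859-1 vs UTF-8); pinning UTF-8 is deterministic.
        String decoded = new String(data, StandardCharsets.UTF_8);
        System.out.println(decoded.equals("säsl-token")); // true everywhere
      }
    }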
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
index 3a01ace763..c904eef1bd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
@@ -48,7 +48,7 @@ public class Permission extends VersionedWritable {
public enum Action {
READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A');
- private byte code;
+ private final byte code;
Action(char code) {
this.code = (byte)code;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index fd1a9d590f..b06acbdffc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -132,6 +132,7 @@ public class VisibilityClient {
BlockingRpcCallback rpcCallback =
new BlockingRpcCallback();
+ @Override
public VisibilityLabelsResponse call(VisibilityLabelsService service)
throws IOException {
VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder();
@@ -217,6 +218,7 @@ public class VisibilityClient {
BlockingRpcCallback rpcCallback =
new BlockingRpcCallback();
+ @Override
public GetAuthsResponse call(VisibilityLabelsService service) throws IOException {
GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder();
getAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
@@ -268,6 +270,7 @@ public class VisibilityClient {
BlockingRpcCallback rpcCallback =
new BlockingRpcCallback();
+ @Override
public ListLabelsResponse call(VisibilityLabelsService service) throws IOException {
ListLabelsRequest.Builder listAuthLabelsReqBuilder = ListLabelsRequest.newBuilder();
if (regex != null) {
@@ -332,6 +335,7 @@ public class VisibilityClient {
BlockingRpcCallback rpcCallback =
new BlockingRpcCallback();
+ @Override
public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException {
SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder();
setAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user)));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index 67aaffd877..e474b1e9e2 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
@@ -298,7 +298,7 @@ public class PoolMap implements Map {
* the type of the resource
*/
@SuppressWarnings("serial")
- public class ReusablePool extends ConcurrentLinkedQueue implements Pool {
+ public static class ReusablePool extends ConcurrentLinkedQueue implements Pool {
private int maxSize;
public ReusablePool(int maxSize) {
@@ -342,7 +342,7 @@ public class PoolMap implements Map {
*
*/
@SuppressWarnings("serial")
- class RoundRobinPool extends CopyOnWriteArrayList implements Pool {
+ static class RoundRobinPool extends CopyOnWriteArrayList implements Pool {
private int maxSize;
private int nextResource = 0;
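Making ReusablePool and RoundRobinPool static (like the test filters later in this patch) removes the hidden reference every inner-class instance keeps to its enclosing object; pools that never touch PoolMap state do not need that reference, which would otherwise keep the whole map reachable. Sketch:

    public class Outer {
      class Inner { }          // keeps Outer.this alive
      static class Nested { }  // no implicit reference to Outer

      public static void main(String[] args) {
        Outer outer = new Outer();
        Inner inner = outer.new Inner();   // requires an Outer instance
        Nested nested = new Nested();      // stands alone
        System.out.println(inner != null && nested != null);
      }
    }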
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
index 20791de453..8075a7aa1f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java
@@ -30,5 +30,6 @@ public class EmptyWatcher implements Watcher {
public static final EmptyWatcher instance = new EmptyWatcher();
private EmptyWatcher() {}
+ @Override
public void process(WatchedEvent event) {}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
index 226796aff7..6d1772d14a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
@@ -41,6 +41,7 @@ import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
@@ -159,7 +160,7 @@ public class HQuorumPeer {
}
File myIdFile = new File(dataDir, "myid");
- PrintWriter w = new PrintWriter(myIdFile);
+ PrintWriter w = new PrintWriter(myIdFile, StandardCharsets.UTF_8.name());
w.println(myId);
w.close();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
index cc0f5f27ad..62dc17d7f0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
@@ -43,6 +43,7 @@ import org.apache.zookeeper.KeeperException;
public class ZKLeaderManager extends ZooKeeperListener {
private static final Log LOG = LogFactory.getLog(ZKLeaderManager.class);
+ private final Object lock = new Object();
private final AtomicBoolean leaderExists = new AtomicBoolean();
private String leaderZNode;
private byte[] nodeId;
@@ -85,14 +86,14 @@ public class ZKLeaderManager extends ZooKeeperListener {
private void handleLeaderChange() {
try {
- synchronized(leaderExists) {
+ synchronized(lock) {
if (ZKUtil.watchAndCheckExists(watcher, leaderZNode)) {
LOG.info("Found new leader for znode: "+leaderZNode);
leaderExists.set(true);
} else {
LOG.info("Leader change, but no new leader found");
leaderExists.set(false);
- leaderExists.notifyAll();
+ lock.notifyAll();
}
}
} catch (KeeperException ke) {
@@ -136,10 +137,10 @@ public class ZKLeaderManager extends ZooKeeperListener {
}
// wait for next chance
- synchronized(leaderExists) {
+ synchronized(lock) {
while (leaderExists.get() && !candidate.isStopped()) {
try {
- leaderExists.wait();
+ lock.wait();
} catch (InterruptedException ie) {
LOG.debug("Interrupted waiting on leader", ie);
}
@@ -153,7 +154,7 @@ public class ZKLeaderManager extends ZooKeeperListener {
*/
public void stepDownAsLeader() {
try {
- synchronized(leaderExists) {
+ synchronized(lock) {
if (!leaderExists.get()) {
return;
}
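The dedicated lock object in ZKLeaderManager matters because the old code both CAS-updated leaderExists and used the same AtomicBoolean as a wait/notify monitor; atomics are meant for lock-free updates, and static analyzers warn when code synchronizes on a java.util.concurrent object. A sketch of the resulting pattern, with illustrative method names:

    import java.util.concurrent.atomic.AtomicBoolean;

    public class LeaderGate {
      private final Object lock = new Object();
      private final AtomicBoolean leaderExists = new AtomicBoolean();

      void onLeaderGone() {
        synchronized (lock) {
          leaderExists.set(false);
          lock.notifyAll(); // wake waiters via the monitor, not the atomic
        }
      }

      void awaitVacancy() throws InterruptedException {
        synchronized (lock) {
          while (leaderExists.get()) { // guard against spurious wakeups
            lock.wait();
          }
        }
      }
    }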
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
index d874768e20..4f1d87c6b3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
@@ -19,11 +19,14 @@
package org.apache.hadoop.hbase.zookeeper;
import java.io.BufferedReader;
+import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.net.Socket;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
@@ -1954,9 +1957,11 @@ public class ZKUtil {
socket.connect(sockAddr, timeout);
socket.setSoTimeout(timeout);
- PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
- BufferedReader in = new BufferedReader(new InputStreamReader(
- socket.getInputStream()));
+ PrintWriter out = new PrintWriter(new BufferedWriter(
+ new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)),
+ true);
+ BufferedReader in = new BufferedReader(
+ new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
out.println("stat");
out.flush();
ArrayList res = new ArrayList();
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
index 1966253a0f..a4f926098f 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java
@@ -19,6 +19,9 @@ package org.apache.hadoop.hbase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.io.compress.Compression;
@@ -77,7 +80,8 @@ public class TestHColumnDescriptor {
public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty()
throws Exception {
try {
- new HColumnDescriptor("".getBytes());
+ new HColumnDescriptor("".getBytes(StandardCharsets.UTF_8));
+ fail("Did not throw");
} catch (IllegalArgumentException e) {
assertEquals("Family name can not be empty", e.getLocalizedMessage());
}
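The added fail(...) closes a real hole: without it, a constructor that stopped throwing would let the test pass vacuously, since a try block followed by an unreached catch asserts nothing. A self-contained sketch of the idiom (validateFamilyName is a hypothetical stand-in for the HColumnDescriptor constructor, not HBase API):

    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.fail;
    import org.junit.Test;

    public class ExpectedExceptionIdiom {
      @Test
      public void rejectsEmptyName() {
        try {
          validateFamilyName(""); // hypothetical helper
          fail("Did not throw");  // reached only if no exception was raised
        } catch (IllegalArgumentException e) {
          assertEquals("Family name can not be empty", e.getMessage());
        }
      }

      private static void validateFamilyName(String name) {
        if (name.isEmpty()) {
          throw new IllegalArgumentException("Family name can not be empty");
        }
      }
    }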
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
index d126994b61..23d2946db1 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
@@ -100,7 +100,7 @@ public class TestHTableDescriptor {
assertEquals(v, deserializedHtd.getMaxFileSize());
assertTrue(deserializedHtd.isReadOnly());
assertEquals(Durability.ASYNC_WAL, deserializedHtd.getDurability());
- assertEquals(deserializedHtd.getRegionReplication(), 2);
+ assertEquals(2, deserializedHtd.getRegionReplication());
}
/**
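The swapped assertEquals arguments here and in the tests below are about diagnostics, not correctness: JUnit's signature is assertEquals(expected, actual), and with the operands reversed a failing test reports the actual value as the expectation. For example:

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class AssertOrder {
      @Test
      public void demo() {
        int regionReplication = 2;
        // On failure this prints "expected:<2> but was:<...>".
        assertEquals(2, regionReplication);
        // assertEquals(regionReplication, 2) would invert that message.
      }
    }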
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
index 0e0fbb024a..b19ba36974 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java
@@ -62,7 +62,7 @@ public class TestInterfaceAudienceAnnotations {
private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class);
/** Selects classes with generated in their package name */
- class GeneratedClassFilter implements ClassFinder.ClassFilter {
+ static class GeneratedClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
return c.getPackage().getName().contains("generated");
@@ -181,7 +181,7 @@ public class TestInterfaceAudienceAnnotations {
}
/** Selects classes that are declared public */
- class PublicClassFilter implements ClassFinder.ClassFilter {
+ static class PublicClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
int mod = c.getModifiers();
@@ -190,7 +190,7 @@ public class TestInterfaceAudienceAnnotations {
}
/** Selects paths (jars and class dirs) only from the main code, not test classes */
- class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
+ static class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
@Override
public boolean isCandidatePath(String resourcePath, boolean isJar) {
return !resourcePath.contains("test-classes") &&
@@ -207,7 +207,7 @@ public class TestInterfaceAudienceAnnotations {
* - enclosing class is not an interface
* - name starts with "__CLR"
*/
- class CloverInstrumentationFilter implements ClassFinder.ClassFilter {
+ static class CloverInstrumentationFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> clazz) {
boolean clover = false;
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
index 8c0b7df26d..363a221718 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.io.InterruptedIOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -88,15 +89,17 @@ import org.junit.experimental.categories.Category;
import org.junit.rules.Timeout;
import org.mockito.Mockito;
+import com.google.common.collect.Lists;
+
@Category(MediumTests.class)
public class TestAsyncProcess {
private final static Log LOG = LogFactory.getLog(TestAsyncProcess.class);
private static final TableName DUMMY_TABLE =
TableName.valueOf("DUMMY_TABLE");
- private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes();
- private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes();
- private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes();
- private static final byte[] FAILS = "FAILS".getBytes();
+ private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] FAILS = "FAILS".getBytes(StandardCharsets.UTF_8);
private static final Configuration conf = new Configuration();
private static ServerName sn = ServerName.valueOf("s1:1,1");
@@ -353,7 +356,8 @@ public class TestAsyncProcess {
return inc.getAndIncrement();
}
}
- class MyAsyncProcessWithReplicas extends MyAsyncProcess {
+
+ static class MyAsyncProcessWithReplicas extends MyAsyncProcess {
private Set failures = new TreeSet(new Bytes.ByteArrayComparator());
private long primarySleepMs = 0, replicaSleepMs = 0;
private Map customPrimarySleepMs = new HashMap();
@@ -625,7 +629,13 @@ public class TestAsyncProcess {
Random rn = new Random();
final long limit = 10 * 1024 * 1024;
final int requestCount = 1 + (int) (rn.nextDouble() * 3);
- long putsHeapSize = Math.abs(rn.nextLong()) % limit;
+ long n = rn.nextLong();
+ if (n < 0) {
+ n = -n;
+ } else if (n == 0) {
+ n = 1;
+ }
+ long putsHeapSize = n % limit;
long maxHeapSizePerRequest = putsHeapSize / requestCount;
LOG.info("[testSubmitRandomSizeRequest] maxHeapSizePerRequest=" + maxHeapSizePerRequest +
", putsHeapSize=" + putsHeapSize);
@@ -747,7 +757,7 @@ public class TestAsyncProcess {
final AsyncRequestFuture ars = ap.submit(DUMMY_TABLE, puts, false, cb, false);
Assert.assertTrue(puts.isEmpty());
ars.waitUntilDone();
- Assert.assertEquals(updateCalled.get(), 1);
+ Assert.assertEquals(1, updateCalled.get());
}
@Test
@@ -759,12 +769,12 @@ public class TestAsyncProcess {
puts.add(createPut(1, true));
for (int i = 0; i != ap.maxConcurrentTasksPerRegion; ++i) {
- ap.incTaskCounters(Arrays.asList(hri1.getRegionName()), sn);
+ ap.incTaskCounters(Lists.newArrayList(hri1.getRegionName()), sn);
}
ap.submit(DUMMY_TABLE, puts, false, null, false);
Assert.assertEquals(puts.size(), 1);
- ap.decTaskCounters(Arrays.asList(hri1.getRegionName()), sn);
+ ap.decTaskCounters(Lists.newArrayList(hri1.getRegionName()), sn);
ap.submit(DUMMY_TABLE, puts, false, null, false);
Assert.assertEquals(0, puts.size());
}
@@ -945,7 +955,7 @@ public class TestAsyncProcess {
final AsyncProcess ap = new MyAsyncProcess(createHConnection(), conf, false);
for (int i = 0; i < 1000; i++) {
- ap.incTaskCounters(Arrays.asList("dummy".getBytes()), sn);
+ ap.incTaskCounters(Lists.newArrayList("dummy".getBytes(StandardCharsets.UTF_8)), sn);
}
final Thread myThread = Thread.currentThread();
@@ -976,7 +986,7 @@ public class TestAsyncProcess {
public void run() {
Threads.sleep(sleepTime);
while (ap.tasksInProgress.get() > 0) {
- ap.decTaskCounters(Arrays.asList("dummy".getBytes()), sn);
+ ap.decTaskCounters(Lists.newArrayList("dummy".getBytes(StandardCharsets.UTF_8)), sn);
}
}
};
@@ -1336,13 +1346,13 @@ public class TestAsyncProcess {
} catch (RetriesExhaustedException expected) {
}
- Assert.assertEquals(res[0], success);
- Assert.assertEquals(res[1], success);
- Assert.assertEquals(res[2], success);
- Assert.assertEquals(res[3], success);
- Assert.assertEquals(res[4], failure);
- Assert.assertEquals(res[5], success);
- Assert.assertEquals(res[6], failure);
+ Assert.assertEquals(success, res[0]);
+ Assert.assertEquals(success, res[1]);
+ Assert.assertEquals(success, res[2]);
+ Assert.assertEquals(success, res[3]);
+ Assert.assertEquals(failure, res[4]);
+ Assert.assertEquals(success, res[5]);
+ Assert.assertEquals(failure, res[6]);
}
@Test
public void testErrorsServers() throws IOException {
@@ -1479,7 +1489,7 @@ public class TestAsyncProcess {
ht.batch(gets, new Object[gets.size()]);
- Assert.assertEquals(ap.nbActions.get(), NB_REGS);
+ Assert.assertEquals(NB_REGS, ap.nbActions.get());
Assert.assertEquals("1 multi response per server", 2, ap.nbMultiResponse.get());
Assert.assertEquals("1 thread per server", 2, con.nbThreads.get());
@@ -1487,7 +1497,7 @@ public class TestAsyncProcess {
for (int i = 0; i < NB_REGS; i++) {
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
Token<? extends TokenIdentifier> token = createTokenMock();
- when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
- when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes());
+ when(token.getIdentifier())
+ .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
+ when(token.getPassword())
+ .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
final NameCallback nameCallback = mock(NameCallback.class);
final PasswordCallback passwordCallback = mock(PasswordCallback.class);
@@ -120,8 +123,10 @@ public class TestHBaseSaslRpcClient {
@Test
public void testSaslClientCallbackHandlerWithException() {
final Token<? extends TokenIdentifier> token = createTokenMock();
- when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
- when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes());
+ when(token.getIdentifier())
+ .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
+ when(token.getPassword())
+ .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token);
try {
saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) });
@@ -291,8 +296,10 @@ public class TestHBaseSaslRpcClient {
throws IOException {
Token<? extends TokenIdentifier> token = createTokenMock();
if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(password)) {
- when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes());
- when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes());
+ when(token.getIdentifier())
+ .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8));
+ when(token.getPassword())
+ .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8));
}
return token;
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
index 9990cd1afd..f5a7a340ba 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java
@@ -23,7 +23,6 @@ import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -51,7 +50,7 @@ public class TestZKUtil {
String node = "/hbase/testUnsecure";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List aclList = ZKUtil.createACL(watcher, node, false);
- Assert.assertEquals(aclList.size(), 1);
+ Assert.assertEquals(1, aclList.size());
Assert.assertTrue(aclList.contains(Ids.OPEN_ACL_UNSAFE.iterator().next()));
}
@@ -62,7 +61,7 @@ public class TestZKUtil {
String node = "/hbase/testSecuritySingleSuperuser";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List aclList = ZKUtil.createACL(watcher, node, true);
- Assert.assertEquals(aclList.size(), 2); // 1+1, since ACL will be set for the creator by default
+ Assert.assertEquals(2, aclList.size()); // 1+1, since ACL will be set for the creator by default
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user1"))));
Assert.assertTrue(aclList.contains(Ids.CREATOR_ALL_ACL.iterator().next()));
}
@@ -74,7 +73,7 @@ public class TestZKUtil {
String node = "/hbase/testCreateACL";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List aclList = ZKUtil.createACL(watcher, node, true);
- Assert.assertEquals(aclList.size(), 4); // 3+1, since ACL will be set for the creator by default
+ Assert.assertEquals(4, aclList.size()); // 3+1, since ACL will be set for the creator by default
Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group1"))));
Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group2"))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user1"))));
@@ -90,13 +89,14 @@ public class TestZKUtil {
String node = "/hbase/testCreateACL";
ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false);
List aclList = ZKUtil.createACL(watcher, node, true);
- Assert.assertEquals(aclList.size(), 3); // 3, since service user the same as one of superuser
+ Assert.assertEquals(3, aclList.size()); // 3, since service user the same as one of superuser
Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group1"))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("auth", ""))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user5"))));
Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user6"))));
}
+ @Test
public void testInterruptedDuringAction()
throws ZooKeeperConnectionException, IOException, KeeperException, InterruptedException {
final RecoverableZooKeeper recoverableZk = Mockito.mock(RecoverableZooKeeper.class);
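The added @Test annotation above is the whole fix: JUnit 4 discovers tests by annotation, so a public void method without it is silently skipped rather than reported. Sketch:

    import org.junit.Test;

    public class AnnotationMatters {
      @Test
      public void runs() { }

      // Looks like a test, is never executed, and no tool complains.
      public void testLooksLikeATestButNeverRuns() { }
    }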
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
index 338265dd42..701c6e1cb5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java
@@ -40,6 +40,7 @@ public class AsyncConsoleAppender extends AsyncAppender {
consoleAppender.setTarget(value);
}
+ @Override
public void activateOptions() {
consoleAppender.activateOptions();
super.activateOptions();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 0290ded596..981fad85de 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -633,7 +633,7 @@ public final class CellUtil {
if (cell instanceof KeyValue) {
return ((KeyValue)cell).heapSizeWithoutTags();
}
- return getSumOfCellKeyElementLengths(cell) + cell.getValueLength();
+ return (long) getSumOfCellKeyElementLengths(cell) + cell.getValueLength();
}
/********************* tags *************************************/
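The (long) casts added in CellUtil, KeyValue and Bytes all address the same trap: Java evaluates int + int in 32 bits and only widens afterwards for the assignment, so any overflow has already happened. Casting one operand promotes the whole expression. For example:

    public class WidenFirst {
      public static void main(String[] args) {
        int keyLength = Integer.MAX_VALUE;
        int valueLength = 10;
        long wrong = keyLength + valueLength;        // overflows as an int first
        long right = (long) keyLength + valueLength; // addition performed in long
        System.out.println(wrong);  // -2147483639
        System.out.println(right);  // 2147483657
      }
    }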
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
index c330fa75cf..980f001f69 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
@@ -51,12 +51,13 @@ public class JitterScheduledThreadPoolExecutorImpl extends ScheduledThreadPoolEx
this.spread = spread;
}
+ @Override
protected <V> java.util.concurrent.RunnableScheduledFuture<V> decorateTask(
Runnable runnable, java.util.concurrent.RunnableScheduledFuture<V> task) {
return new JitteredRunnableScheduledFuture<>(task);
}
-
+ @Override
protected <V> java.util.concurrent.RunnableScheduledFuture<V> decorateTask(
Callable<V> callable, java.util.concurrent.RunnableScheduledFuture<V> task) {
return new JitteredRunnableScheduledFuture<>(task);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 7670aea283..2b09faf10b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -197,9 +197,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
*/
public static long getKeyValueDataStructureSize(int klength, int vlength, int tagsLength) {
if (tagsLength == 0) {
- return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
+ return (long) KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength;
}
- return KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
+ return (long) KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength;
}
/**
@@ -213,7 +213,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
* @return the key data structure length
*/
public static long getKeyDataStructureSize(int rlength, int flength, int qlength) {
- return KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength;
+ return (long) KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength;
}
/**
@@ -2531,7 +2531,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
int length = kv.getLength();
out.writeInt(length);
out.write(kv.getBuffer(), kv.getOffset(), length);
- return length + Bytes.SIZEOF_INT;
+ return (long) length + Bytes.SIZEOF_INT;
}
/**
@@ -2553,7 +2553,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
// This does same as DataOuput#writeInt (big-endian, etc.)
out.write(Bytes.toBytes(length));
out.write(kv.getBuffer(), kv.getOffset(), length);
- return length + Bytes.SIZEOF_INT;
+ return (long) length + Bytes.SIZEOF_INT;
}
/**
@@ -2580,7 +2580,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId,
// This does same as DataOuput#writeInt (big-endian, etc.)
StreamUtils.writeInt(out, length);
out.write(kv.getBuffer(), kv.getOffset(), length);
- return length + Bytes.SIZEOF_INT;
+ return (long) length + Bytes.SIZEOF_INT;
}
/**
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
index 939002cb13..e427e50d63 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java
@@ -79,6 +79,7 @@ public class ProcedureInfo implements Cloneable {
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="CN_IDIOM_NO_SUPER_CALL",
justification="Intentional; calling super class clone doesn't make sense here.")
+ @Override
public ProcedureInfo clone() {
return new ProcedureInfo(procId, procName, procOwner, procState, parentId, nonceKey,
exception, lastUpdate, startTime, result);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
index b4efaf8039..21c841c814 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java
@@ -80,6 +80,7 @@ public class CellCodecWithTags implements Codec {
super(in);
}
+ @Override
protected Cell parseCell() throws IOException {
byte[] row = readByteArray(this.in);
byte[] family = readByteArray(in);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index 07fd838201..6df9ec3ec4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -64,6 +64,7 @@ public class KeyValueCodec implements Codec {
super(in);
}
+ @Override
protected Cell parseCell() throws IOException {
return KeyValueUtil.iscreate(in, false);
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
index 5d34a46f83..c241785e52 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java
@@ -70,6 +70,7 @@ public class KeyValueCodecWithTags implements Codec {
super(in);
}
+ @Override
protected Cell parseCell() throws IOException {
return KeyValueUtil.iscreate(in, true);
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
index 4d526143dd..8c371a6d86 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java
@@ -60,7 +60,7 @@ public class BoundedByteBufferPool {
volatile int runningAverage;
// Scratch that keeps rough total size of pooled bytebuffers
- private volatile int totalReservoirCapacity;
+ private AtomicLong totalReservoirCapacity = new AtomicLong(0);
// For reporting
private AtomicLong allocations = new AtomicLong(0);
@@ -89,7 +89,7 @@ public class BoundedByteBufferPool {
try {
bb = this.buffers.poll();
if (bb != null) {
- this.totalReservoirCapacity -= bb.capacity();
+ this.totalReservoirCapacity.addAndGet(-bb.capacity());
}
} finally {
lock.unlock();
@@ -119,8 +119,8 @@ public class BoundedByteBufferPool {
try {
success = this.buffers.offer(bb);
if (success) {
- this.totalReservoirCapacity += bb.capacity();
- average = this.totalReservoirCapacity / this.buffers.size(); // size will never be 0.
+ average = (int) this.totalReservoirCapacity.addAndGet(bb.capacity()) /
+ this.buffers.size(); // size will never be 0.
}
} finally {
lock.unlock();
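The BoundedByteBufferPool change fixes a genuine race: volatile only guarantees visibility, so totalReservoirCapacity += capacity was still a separate read, add, and write, and concurrent updaters could lose increments unless every access shared one lock. AtomicLong.addAndGet performs the update in a single atomic step and also widens the accumulator to long. A minimal sketch of the counter pattern:

    import java.util.concurrent.atomic.AtomicLong;

    public class AtomicCounter {
      private final AtomicLong total = new AtomicLong(0);

      long add(int capacity) {
        return total.addAndGet(capacity); // atomic read-modify-write
      }

      long remove(int capacity) {
        return total.addAndGet(-capacity);
      }
    }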
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
index d74a5d6283..dd7e300883 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
@@ -148,6 +148,7 @@ implements WritableComparable {
return this.offset;
}
+ @Override
public void readFields(final DataInput in) throws IOException {
this.length = in.readInt();
this.bytes = new byte[this.length];
@@ -155,6 +156,7 @@ implements WritableComparable {
this.offset = 0;
}
+ @Override
public void write(final DataOutput out) throws IOException {
out.writeInt(this.length);
out.write(this.bytes, this.offset, this.length);
@@ -175,6 +177,7 @@ implements WritableComparable {
* @return Positive if left is bigger than right, 0 if they are equal, and
* negative if left is smaller than right.
*/
+ @Override
public int compareTo(ImmutableBytesWritable that) {
return WritableComparator.compareBytes(
this.bytes, this.offset, this.length,
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
index 9697da334a..d056115a5d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java
@@ -106,7 +106,7 @@ public final class Compression {
LZO("lzo") {
// Use base type to avoid compile-time dependencies.
private volatile transient CompressionCodec lzoCodec;
- private transient Object lock = new Object();
+ private final transient Object lock = new Object();
@Override
CompressionCodec getCodec(Configuration conf) {
@@ -133,7 +133,7 @@ public final class Compression {
},
GZ("gz") {
private volatile transient GzipCodec codec;
- private transient Object lock = new Object();
+ private final transient Object lock = new Object();
@Override
DefaultCodec getCodec(Configuration conf) {
@@ -185,7 +185,7 @@ public final class Compression {
SNAPPY("snappy") {
// Use base type to avoid compile-time dependencies.
private volatile transient CompressionCodec snappyCodec;
- private transient Object lock = new Object();
+ private final transient Object lock = new Object();
@Override
CompressionCodec getCodec(Configuration conf) {
@@ -212,7 +212,7 @@ public final class Compression {
LZ4("lz4") {
// Use base type to avoid compile-time dependencies.
private volatile transient CompressionCodec lz4Codec;
- private transient Object lock = new Object();
+ private final transient Object lock = new Object();
@Override
CompressionCodec getCodec(Configuration conf) {
@@ -239,7 +239,7 @@ public final class Compression {
BZIP2("bzip2") {
// Use base type to avoid compile-time dependencies.
private volatile transient CompressionCodec bzipCodec;
- private transient Object lock = new Object();
+ private final transient Object lock = new Object();
@Override
CompressionCodec getCodec(Configuration conf) {
@@ -266,7 +266,7 @@ public final class Compression {
ZSTD("zstd") {
// Use base type to avoid compile-time dependencies.
private volatile transient CompressionCodec zStandardCodec;
- private transient Object lock = new Object();
+ private final transient Object lock = new Object();
@Override
CompressionCodec getCodec(Configuration conf) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
index d7535e58ce..7d6b15c8b4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
@@ -176,13 +176,10 @@ public enum DataBlockEncoding {
protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName){
try {
- return (DataBlockEncoder)Class.forName(fullyQualifiedClassName).newInstance();
- } catch (InstantiationException e) {
+ return (DataBlockEncoder)Class.forName(fullyQualifiedClassName)
+ .getDeclaredConstructor().newInstance();
+ } catch (Exception e) {
throw new RuntimeException(e);
- } catch (IllegalAccessException e) {
- throw new RuntimeException(e);
- } catch (ClassNotFoundException e) {
- throw new IllegalArgumentException(e);
}
}
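getDeclaredConstructor().newInstance() is the modern replacement for Class.newInstance(), which is deprecated since Java 9 because it rethrows any checked exception the constructor raises without declaring it; the constructor path instead wraps such failures in InvocationTargetException, and collapsing the catch blocks to Exception keeps the old wrap-and-rethrow behavior. A sketch (class name illustrative):

    public class ReflectiveCreate {
      public static Object create(String className) {
        try {
          // Constructor exceptions surface wrapped in InvocationTargetException
          // instead of propagating unchecked as with Class#newInstance.
          return Class.forName(className).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
          throw new RuntimeException("Failed instantiating " + className, e);
        }
      }

      public static void main(String[] args) {
        System.out.println(create("java.lang.StringBuilder"));
      }
    }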
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
index 61444784f2..f28100dfc1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
@@ -206,7 +206,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
throws IOException {
- byte flag = 0;
+ int flag = 0; // Do not use more bits than can fit into a byte
int kLength = KeyValueUtil.keyLength(cell);
int vLength = cell.getValueLength();
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
index a4fca2ca91..192c84d1bb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
@@ -255,7 +255,7 @@ public class EncodedDataBlock {
}
BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
baos.writeTo(stream);
- this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf);
+ this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.ourBytes);
} catch (IOException e) {
throw new RuntimeException(String.format(
"Bug in encoding part of algorithm %s. " +
@@ -266,11 +266,11 @@ public class EncodedDataBlock {
}
private static class BufferGrabbingByteArrayOutputStream extends ByteArrayOutputStream {
- private byte[] buf;
+ private byte[] ourBytes;
@Override
- public void write(byte[] b, int off, int len) {
- this.buf = b;
+ public synchronized void write(byte[] b, int off, int len) {
+ this.ourBytes = b;
}
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
index c14b542181..72b6a5c00e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
@@ -250,7 +250,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
throws IOException {
- byte flag = 0;
+ int flag = 0; // Do not use more bits than will fit into a byte
int kLength = KeyValueUtil.keyLength(cell);
int vLength = cell.getValueLength();
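Widening flag from byte to int here and in DiffKeyDeltaEncoder above removes the implicit narrowing that compound assignment hides: flag |= x on a byte compiles as flag = (byte) (flag | x). Accumulating in an int and narrowing once at the write keeps the cast explicit and auditable. Sketch with illustrative flag values:

    import java.io.ByteArrayOutputStream;

    public class FlagByte {
      static final int FLAG_SAME_ROW = 1;
      static final int FLAG_SAME_FAMILY = 1 << 1;

      public static void main(String[] args) {
        int flag = 0; // int arithmetic, no hidden (byte) cast per |=
        flag |= FLAG_SAME_ROW;
        flag |= FLAG_SAME_FAMILY;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write((byte) flag); // the single, explicit narrowing
        System.out.println(Integer.toBinaryString(flag)); // 11
      }
    }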
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
index 8562cf06f8..78ae9a64f2 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java
@@ -134,7 +134,7 @@ public class LRUDictionary implements Dictionary {
}
private void moveToHead(Node n) {
- if (head == n) {
+ if (head.equals(n)) {
// no-op -- it's already the head.
return;
}
@@ -147,7 +147,7 @@ public class LRUDictionary implements Dictionary {
if (n.next != null) {
n.next.prev = n.prev;
} else {
- assert n == tail;
+ assert n.equals(tail);
tail = n.prev;
}
// Node is now removed from the list. Re-add it at the head.
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
index 7e6de4ad56..9eac12f7a6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java
@@ -98,11 +98,11 @@ public class UserProvider extends BaseConfigurable {
}
// Provide the reload function that uses the executor thread.
- public ListenableFuture reload(final String k,
- String[] oldValue) throws Exception {
+ @Override
+ public ListenableFuture reload(final String k, String[] oldValue)
+ throws Exception {
return executor.submit(new Callable() {
-
@Override
public String[] call() throws Exception {
return getGroupStrings(k);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
index 2d9c398dad..4818efc1e9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java
@@ -45,7 +45,7 @@ public class SpanReceiverHost {
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SE_BAD_FIELD")
private static enum SingletonHolder {
INSTANCE;
- Object lock = new Object();
+ final Object lock = new Object();
SpanReceiverHost host = null; // FindBugs: SE_BAD_FIELD
}
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
index 8de39aec7c..8ed2322619 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java
@@ -706,7 +706,7 @@ public class CopyOnWriteArrayMap extends AbstractMap
}
}
- private final class ArrayKeyIterator implements Iterator {
+ private static final class ArrayKeyIterator implements Iterator {
int index;
private final ArrayHolder holder;
@@ -732,7 +732,7 @@ public class CopyOnWriteArrayMap extends AbstractMap
}
}
- private final class ArrayValueIterator implements Iterator {
+ private static final class ArrayValueIterator implements Iterator {
int index;
private final ArrayHolder holder;
@@ -758,7 +758,7 @@ public class CopyOnWriteArrayMap extends AbstractMap
}
}
- private final class ArrayEntryIterator implements Iterator<Map.Entry<K, V>> {
+ private static final class ArrayEntryIterator<K, V> implements Iterator<Map.Entry<K, V>> {
int index;
private final ArrayHolder holder;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
index a68d12f9b5..137d1ca912 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java
@@ -189,9 +189,9 @@ public abstract class AbstractByteRange implements ByteRange {
public short getShort(int index) {
int offset = this.offset + index;
short n = 0;
- n ^= bytes[offset] & 0xFF;
- n <<= 8;
- n ^= bytes[offset + 1] & 0xFF;
+ n = (short) (n ^ (bytes[offset] & 0xFF));
+ n = (short) (n << 8);
+ n = (short) (n ^ (bytes[offset + 1] & 0xFF));
return n;
}
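One caution when unrolling n ^= b & 0xFF into cast form, as the getShort/toShort hunks here and in Bytes.java do: the 0xFF mask must stay attached to the byte operand. Masking the whole xor, (n ^ b) & 0xFF, also erases the high byte already accumulated in n, so the decoded short would collapse to its low byte. A check:

    public class ShortDecode {
      public static void main(String[] args) {
        byte b0 = 0x12, b1 = 0x34;
        short n = 0;
        n = (short) (n ^ (b0 & 0xFF)); // mask the byte, not the xor result
        n = (short) (n << 8);
        n = (short) (n ^ (b1 & 0xFF));
        System.out.println(Integer.toHexString(n)); // 1234
      }
    }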
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
index 004313501f..00c05cd121 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java
@@ -40,6 +40,7 @@ import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@@ -571,7 +572,7 @@ public class Base64 {
return new String(baos.toByteArray(), PREFERRED_ENCODING);
} catch (UnsupportedEncodingException uue) {
- return new String(baos.toByteArray());
+ return new String(baos.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException e) {
LOG.error("error encoding object", e);
@@ -696,7 +697,7 @@ public class Base64 {
return new String(baos.toByteArray(), PREFERRED_ENCODING);
} catch (UnsupportedEncodingException uue) {
- return new String(baos.toByteArray());
+ return new String(baos.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException e) {
LOG.error("error encoding byte array", e);
@@ -753,7 +754,7 @@ public class Base64 {
return new String(outBuff, 0, e, PREFERRED_ENCODING);
} catch (UnsupportedEncodingException uue) {
- return new String(outBuff, 0, e);
+ return new String(outBuff, 0, e, StandardCharsets.UTF_8);
}
} // end encodeBytes
@@ -928,7 +929,7 @@ public class Base64 {
bytes = s.getBytes(PREFERRED_ENCODING);
} catch (UnsupportedEncodingException uee) {
- bytes = s.getBytes();
+ bytes = s.getBytes(StandardCharsets.UTF_8);
} // end catch
// Decode
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
index d3414dd599..d9f8dcfd1d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java
@@ -167,7 +167,7 @@ public final class ByteBufferArray {
int endBuffer = (int) (end / bufferSize), endOffset = (int) (end % bufferSize);
assert array.length >= len + arrayOffset;
assert startBuffer >= 0 && startBuffer < bufferCount;
- assert endBuffer >= 0 && endBuffer < bufferCount
+ assert (endBuffer >= 0 && endBuffer < bufferCount)
|| (endBuffer == bufferCount && endOffset == 0);
if (startBuffer >= locks.length || startBuffer < 0) {
String msg = "Failed multiple, start=" + start + ",startBuffer="
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
index cf59f694b1..b5b1d96a4d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
@@ -190,27 +190,27 @@ public final class ByteBufferUtils {
return 8;
}
- if (value < (1l << 4 * 8)) {
+ if (value < (1l << (4 * 8))) {
// no more than 4 bytes
- if (value < (1l << 2 * 8)) {
- if (value < (1l << 1 * 8)) {
+ if (value < (1l << (2 * 8))) {
+ if (value < (1l << (1 * 8))) {
return 1;
}
return 2;
}
- if (value < (1l << 3 * 8)) {
+ if (value < (1l << (3 * 8))) {
return 3;
}
return 4;
}
// more than 4 bytes
- if (value < (1l << 6 * 8)) {
- if (value < (1l << 5 * 8)) {
+ if (value < (1l << (6 * 8))) {
+ if (value < (1l << (5 * 8))) {
return 5;
}
return 6;
}
- if (value < (1l << 7 * 8)) {
+ if (value < (1l << (7 * 8))) {
return 7;
}
return 8;
@@ -226,13 +226,13 @@ public final class ByteBufferUtils {
return 4;
}
- if (value < (1 << 2 * 8)) {
- if (value < (1 << 1 * 8)) {
+ if (value < (1 << (2 * 8))) {
+ if (value < (1 << (1 * 8))) {
return 1;
}
return 2;
}
- if (value <= (1 << 3 * 8)) {
+ if (value <= (1 << (3 * 8))) {
return 3;
}
return 4;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 820b81e618..93dcafe7a3 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -1122,9 +1122,9 @@ public class Bytes implements Comparable {
return toShortUnsafe(bytes, offset);
} else {
short n = 0;
- n ^= bytes[offset] & 0xFF;
- n <<= 8;
- n ^= bytes[offset+1] & 0xFF;
+ n = (short) (n ^ (bytes[offset] & 0xFF));
+ n = (short) (n << 8);
+ n = (short) (n ^ (bytes[offset+1] & 0xFF));
return n;
}
}
@@ -1565,8 +1565,8 @@ public class Bytes implements Comparable {
final int stride = 8;
final int minLength = Math.min(length1, length2);
int strideLimit = minLength & ~(stride - 1);
- final long offset1Adj = offset1 + BYTE_ARRAY_BASE_OFFSET;
- final long offset2Adj = offset2 + BYTE_ARRAY_BASE_OFFSET;
+ final long offset1Adj = (long) offset1 + BYTE_ARRAY_BASE_OFFSET;
+ final long offset2Adj = (long) offset2 + BYTE_ARRAY_BASE_OFFSET;
int i;
/*
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
index 262864ab4f..35acbde092 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java
@@ -62,6 +62,7 @@ public class ClassLoaderBase extends URLClassLoader {
/**
* Returns the lock object for class loading operations.
*/
+ @Override
protected Object getClassLoadingLock(String className) {
Object lock = parallelLockMap.get(className);
if (lock != null) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
index 51e169441c..edb5b2d935 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
@@ -344,7 +344,7 @@ public class ClassSize {
* @return the size estimate, in bytes
*/
private static long estimateBaseFromCoefficients(int [] coeff, boolean debug) {
- long prealign_size = OBJECT + coeff[0] + coeff[2] * REFERENCE;
+ long prealign_size = (long) OBJECT + coeff[0] + coeff[2] * REFERENCE;
// Round up to a multiple of 8
long size = align(prealign_size) + align(coeff[1] * ARRAY);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
index 4457fe00b3..439b32128a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java
@@ -44,7 +44,8 @@ public abstract class HasThread implements Runnable {
public Thread getThread() {
return thread;
}
-
+
+ @Override
public abstract void run();
//// Begin delegation to Thread
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
index 3625a12566..9e9fc6fab7 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java
@@ -26,6 +26,7 @@ import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -160,7 +161,7 @@ public class JVM {
new String[]{"bash", "-c",
"ls /proc/" + pidhost[0] + "/fdinfo | wc -l"});
inputStream = p.getInputStream();
- inputStreamReader = new InputStreamReader(inputStream);
+ inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
bufferedReader = new BufferedReader(inputStreamReader);
String openFileDesCount;
if ((openFileDesCount = bufferedReader.readLine()) != null) {
@@ -236,7 +237,7 @@ public class JVM {
int count = 0;
Process p = Runtime.getRuntime().exec("ps -e");
inputStream = p.getInputStream();
- inputStreamReader = new InputStreamReader(inputStream);
+ inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
bufferedReader = new BufferedReader(inputStreamReader);
while (bufferedReader.readLine() != null) {
count++;
@@ -288,7 +289,7 @@ public class JVM {
//using linux bash commands to retrieve info
Process p = Runtime.getRuntime().exec(new String[]{"bash", "-c", "ulimit -n"});
in = p.getInputStream();
- output = new BufferedReader(new InputStreamReader(in));
+ output = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
String maxFileDesCount;
if ((maxFileDesCount = output.readLine()) != null) {
return Long.parseLong(maxFileDesCount);
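Constructing an InputStreamReader without a charset uses the JVM's platform default, so parsing subprocess output can break on hosts with a non-UTF-8 locale. A minimal sketch of the fixed pattern, inside a method that may throw IOException (the echo command is illustrative only):

  Process p = Runtime.getRuntime().exec(new String[] {"bash", "-c", "echo 42"});
  try (BufferedReader r = new BufferedReader(
      new InputStreamReader(p.getInputStream(), StandardCharsets.UTF_8))) {
    long value = Long.parseLong(r.readLine()); // decoding is now locale-independent
  }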
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
index 9bdb829b94..a969949500 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java
@@ -67,6 +67,7 @@ public class LongAdder extends Striped64 implements Serializable {
/**
* Version of plus for use in retryUpdate
*/
+ @Override
final long fn(long v, long x) { return v + x; }
/**
@@ -171,6 +172,7 @@ public class LongAdder extends Striped64 implements Serializable {
* Returns the String representation of the {@link #sum}.
* @return the String representation of the {@link #sum}
*/
+ @Override
public String toString() {
return Long.toString(sum());
}
@@ -180,6 +182,7 @@ public class LongAdder extends Striped64 implements Serializable {
*
* @return the sum
*/
+ @Override
public long longValue() {
return sum();
}
@@ -188,6 +191,7 @@ public class LongAdder extends Striped64 implements Serializable {
* Returns the {@link #sum} as an {@code int} after a narrowing
* primitive conversion.
*/
+ @Override
public int intValue() {
return (int)sum();
}
@@ -196,6 +200,7 @@ public class LongAdder extends Striped64 implements Serializable {
* Returns the {@link #sum} as a {@code float}
* after a widening primitive conversion.
*/
+ @Override
public float floatValue() {
return (float)sum();
}
@@ -204,6 +209,7 @@ public class LongAdder extends Striped64 implements Serializable {
* Returns the {@link #sum} as a {@code double} after a widening
* primitive conversion.
*/
+ @Override
public double doubleValue() {
return (double)sum();
}
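The @Override annotations added above are behavior-neutral; their value is that the compiler now verifies each method really overrides a supertype member (Number.intValue(), Object.toString(), and so on), so a typo in the name or signature fails compilation instead of silently declaring a new method. A contrived sketch of the mistake it guards against (the class is hypothetical):

  class Celsius {
    private final double degrees;
    Celsius(double degrees) { this.degrees = degrees; }
    @Override
    public String toString() { return degrees + " C"; } // renaming this to "tostring" is now a compile error
  }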
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
index 9a40aeef13..03ff74575d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java
@@ -505,17 +505,17 @@ public class OrderedBytes {
x = src.get();
a1 = ord.apply(x) & 0xff;
if (-1 == unsignedCmp(a0, 249)) {
- return (a0 - 241) * 256 + a1 + 240;
+ return (a0 - 241L) * 256 + a1 + 240;
}
x = src.get();
a2 = ord.apply(x) & 0xff;
if (a0 == 249) {
- return 2288 + 256 * a1 + a2;
+ return 2288L + 256 * a1 + a2;
}
x = src.get();
a3 = ord.apply(x) & 0xff;
if (a0 == 250) {
- return (a1 << 16) | (a2 << 8) | a3;
+ return ((long) a1 << 16) | (a2 << 8) | a3;
}
x = src.get();
a4 = ord.apply(x) & 0xff;
@@ -665,7 +665,8 @@ public class OrderedBytes {
dst.put((byte) ((2 * d + 1) & 0xff));
abs = abs.subtract(BigDecimal.valueOf(d));
}
- a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x
+ // terminal digit should be 2x
+ a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe);
if (isNeg) {
// negative values encoded as ~M
DESCENDING.apply(a, offset + startM, dst.getPosition() - startM);
@@ -749,8 +750,8 @@ public class OrderedBytes {
dst.put((byte) (2 * d + 1));
abs = abs.subtract(BigDecimal.valueOf(d));
}
-
- a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x
+ // terminal digit should be 2x
+ a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe);
if (isNeg) {
// negative values encoded as ~M
DESCENDING.apply(a, offset + startM, dst.getPosition() - startM);
@@ -1065,7 +1066,8 @@ public class OrderedBytes {
if (s > 1) {
dst.put((byte) (0x7f & t));
} else {
- dst.getBytes()[offset + dst.getPosition() - 1] &= 0x7f;
+ dst.getBytes()[offset + dst.getPosition() - 1] =
+ (byte) (dst.getBytes()[offset + dst.getPosition() - 1] & 0x7f);
}
}
ord.apply(dst.getBytes(), offset + start, dst.getPosition() - start);
@@ -1118,7 +1120,7 @@ public class OrderedBytes {
ret.put((byte) (t | ((ord.apply(a[offset + i]) & 0x7f) >>> s)));
}
if (i == end) break;
- t = (byte) ((ord.apply(a[offset + i]) << 8 - s) & 0xff);
+ t = (byte) ((ord.apply(a[offset + i]) << (8 - s)) & 0xff);
s = s == 1 ? 7 : s - 1;
}
src.setPosition(end);
@@ -1374,7 +1376,7 @@ public class OrderedBytes {
public static int encodeFloat32(PositionedByteRange dst, float val, Order ord) {
final int offset = dst.getOffset(), start = dst.getPosition();
int i = Float.floatToIntBits(val);
- i ^= ((i >> Integer.SIZE - 1) | Integer.MIN_VALUE);
+ i ^= ((i >> (Integer.SIZE - 1)) | Integer.MIN_VALUE);
dst.put(FIXED_FLOAT32)
.put((byte) (i >> 24))
.put((byte) (i >> 16))
@@ -1396,7 +1398,7 @@ public class OrderedBytes {
for (int i = 1; i < 4; i++) {
val = (val << 8) + (ord.apply(src.get()) & 0xff);
}
- val ^= (~val >> Integer.SIZE - 1) | Integer.MIN_VALUE;
+ val ^= (~val >> (Integer.SIZE - 1)) | Integer.MIN_VALUE;
return Float.intBitsToFloat(val);
}
@@ -1468,7 +1470,7 @@ public class OrderedBytes {
public static int encodeFloat64(PositionedByteRange dst, double val, Order ord) {
final int offset = dst.getOffset(), start = dst.getPosition();
long lng = Double.doubleToLongBits(val);
- lng ^= ((lng >> Long.SIZE - 1) | Long.MIN_VALUE);
+ lng ^= ((lng >> (Long.SIZE - 1)) | Long.MIN_VALUE);
dst.put(FIXED_FLOAT64)
.put((byte) (lng >> 56))
.put((byte) (lng >> 48))
@@ -1494,7 +1496,7 @@ public class OrderedBytes {
for (int i = 1; i < 8; i++) {
val = (val << 8) + (ord.apply(src.get()) & 0xff);
}
- val ^= (~val >> Long.SIZE - 1) | Long.MIN_VALUE;
+ val ^= (~val >> (Long.SIZE - 1)) | Long.MIN_VALUE;
return Double.longBitsToDouble(val);
}
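Two related cleanups recur in this file. First, additive operators bind tighter than shifts in Java, so i >> Integer.SIZE - 1 already means i >> (Integer.SIZE - 1); the added parentheses change nothing at runtime and only make the precedence explicit. Second, casting the first operand to long, as in (long) a1 << 16, makes a value that is returned as long actually be computed in long arithmetic rather than widened after the fact. A small sketch of both points:

  int s = 3;
  assert (1 << 8 - s) == (1 << (8 - s)); // '-' is applied before '<<' either way
  int a1 = 0x80;
  long v = ((long) a1 << 16) | 0xff;     // shift and OR happen in 64-bit arithmetic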
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
index 36f2fce59f..02b9b3f4fe 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java
@@ -338,6 +338,7 @@ abstract class Striped64 extends Number {
try {
return java.security.AccessController.doPrivileged
(new java.security.PrivilegedExceptionAction() {
+ @Override
public sun.misc.Unsafe run() throws Exception {
Class k = sun.misc.Unsafe.class;
for (java.lang.reflect.Field f : k.getDeclaredFields()) {
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
index 5c2bc1281d..fa02b25dcb 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java
@@ -18,11 +18,13 @@
*/
package org.apache.hadoop.hbase.util;
+import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.lang.Thread.UncaughtExceptionHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
@@ -298,7 +300,8 @@ public class Threads {
if (PRINT_THREAD_INFO_METHOD_WITH_PRINTSTREAM) {
PRINT_THREAD_INFO_METHOD.invoke(null, stream, title);
} else {
- PRINT_THREAD_INFO_METHOD.invoke(null, new PrintWriter(stream), title);
+ PRINT_THREAD_INFO_METHOD.invoke(null,
+ new PrintWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8)), title);
}
} catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
throw new RuntimeException(e.getCause());
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
index 1438ab7c55..059ed0ef8b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java
@@ -40,6 +40,7 @@ public class Triple {
return new Triple(first, second, third);
}
+ @Override
public int hashCode() {
int hashFirst = (first != null ? first.hashCode() : 0);
int hashSecond = (second != null ? second.hashCode() : 0);
@@ -48,6 +49,7 @@ public class Triple {
return (hashFirst >> 1) ^ hashSecond ^ (hashThird << 1);
}
+ @Override
public boolean equals(Object obj) {
if (!(obj instanceof Triple)) {
return false;
@@ -65,6 +67,7 @@ public class Triple {
return true;
}
+ @Override
public String toString() {
return "(" + first + ", " + second + "," + third + " )";
}
@@ -93,6 +96,3 @@ public class Triple {
this.third = third;
}
}
-
-
-
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
index 97bce75ac5..9ebff7d8b4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java
@@ -89,7 +89,7 @@ public final class UnsafeAccess {
destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
}
- long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET;
+ long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET;
unsafeCopy(src, srcAddress, destBase, destAddress, length);
}
@@ -123,7 +123,7 @@ public final class UnsafeAccess {
srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset();
srcBase = src.array();
}
- long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET;
+ long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET;
unsafeCopy(srcBase, srcAddress, dest, destAddress, length);
}
@@ -144,13 +144,13 @@ public final class UnsafeAccess {
if (src.isDirect()) {
srcAddress = srcOffset + ((DirectBuffer) src).address();
} else {
- srcAddress = srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET;
+ srcAddress = (long) srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET;
srcBase = src.array();
}
if (dest.isDirect()) {
destAddress = destOffset + ((DirectBuffer) dest).address();
} else {
- destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
+ destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset();
destBase = dest.array();
}
unsafeCopy(srcBase, srcAddress, destBase, destAddress, length);
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
index 2e0436ca01..a28869279f 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java
@@ -287,7 +287,7 @@ public class ClassFinder {
return null;
}
- private class FileFilterWithName implements FileFilter {
+ private static class FileFilterWithName implements FileFilter {
private FileNameFilter nameFilter;
public FileFilterWithName(FileNameFilter nameFilter) {
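Marking a private inner class static drops the implicit reference to the enclosing instance, which shrinks each instance and prevents the filter from accidentally keeping its outer object reachable. A minimal sketch of the difference (Outer is a hypothetical name):

  class Outer {
    class Inner {}          // every Inner holds a hidden Outer.this reference
    static class Nested {}  // no hidden reference; depends only on the outer class
  }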
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index b7361bfcf8..2cdab5d380 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -203,6 +203,7 @@ public class HBaseCommonTestingUtility {
LOG.warn("Failed to delete " + dir.getAbsolutePath(), ex);
}
} while (ntries < 30);
- return ntries < 30;
+
+ return false;
}
}
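The old return value was dead logic: a do/while guarded by ntries < 30 can only fall through once that condition is false, so ntries < 30 evaluated after the loop is always false. Returning the constant states that directly. A sketch of the control flow, assuming (as the surrounding context suggests) that the success path returns true from inside the loop body; tryDeleteDir is a hypothetical stand-in for the real body:

  int ntries = 0;
  do {
    if (tryDeleteDir()) {
      return true;          // success exits inside the loop
    }
    ntries++;
  } while (ntries < 30);
  return false;             // reached only after all 30 attempts failed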
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
index 435f8bb443..7080078559 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
import static org.junit.Assert.*;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.NavigableMap;
@@ -52,7 +53,7 @@ public class TestCellUtil {
/**
* CellScanner used in test.
*/
- private class TestCellScanner implements CellScanner {
+ private static class TestCellScanner implements CellScanner {
private int count = 0;
private Cell current = null;
private final int cellsCount;
@@ -80,7 +81,7 @@ public class TestCellUtil {
/**
* Cell used in test. Has row only.
*/
- private class TestCell implements Cell {
+ private static class TestCell implements Cell {
private final byte [] row;
TestCell(final int i) {
@@ -331,7 +332,8 @@ public class TestCellUtil {
@Test
public void testFindCommonPrefixInFlatKey() {
// The whole key matching case
- KeyValue kv1 = new KeyValue("r1".getBytes(), "f1".getBytes(), "q1".getBytes(), null);
+ KeyValue kv1 = new KeyValue("r1".getBytes(StandardCharsets.UTF_8),
+ "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
Assert.assertEquals(kv1.getKeyLength(),
CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, true));
Assert.assertEquals(kv1.getKeyLength(),
@@ -339,30 +341,35 @@ public class TestCellUtil {
Assert.assertEquals(kv1.getKeyLength() - KeyValue.TIMESTAMP_TYPE_SIZE,
CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, false));
// The rk length itself mismatch
- KeyValue kv2 = new KeyValue("r12".getBytes(), "f1".getBytes(), "q1".getBytes(), null);
+ KeyValue kv2 = new KeyValue("r12".getBytes(StandardCharsets.UTF_8),
+ "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
Assert.assertEquals(1, CellUtil.findCommonPrefixInFlatKey(kv1, kv2, true, true));
// part of rk is same
- KeyValue kv3 = new KeyValue("r14".getBytes(), "f1".getBytes(), "q1".getBytes(), null);
- Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes().length,
+ KeyValue kv3 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
+ "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
+ Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes(StandardCharsets.UTF_8).length,
CellUtil.findCommonPrefixInFlatKey(kv2, kv3, true, true));
// entire rk is same but different cf name
- KeyValue kv4 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q1".getBytes(), null);
+ KeyValue kv4 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
+ "f2".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null);
Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
- + "f".getBytes().length, CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true));
+ + "f".getBytes(StandardCharsets.UTF_8).length,
+ CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true));
// rk and family are same and part of qualifier
- KeyValue kv5 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q123".getBytes(), null);
+ KeyValue kv5 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8),
+ "f2".getBytes(StandardCharsets.UTF_8), "q123".getBytes(StandardCharsets.UTF_8), null);
Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
+ kv4.getFamilyLength() + kv4.getQualifierLength(),
CellUtil.findCommonPrefixInFlatKey(kv4, kv5, true, true));
// rk, cf and q are same. ts differs
- KeyValue kv6 = new KeyValue("rk".getBytes(), 1234L);
- KeyValue kv7 = new KeyValue("rk".getBytes(), 1235L);
+ KeyValue kv6 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L);
+ KeyValue kv7 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1235L);
// only last byte out of 8 ts bytes in ts part differs
Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
+ kv6.getFamilyLength() + kv6.getQualifierLength() + 7,
CellUtil.findCommonPrefixInFlatKey(kv6, kv7, true, true));
// rk, cf, q and ts are same. Only type differs
- KeyValue kv8 = new KeyValue("rk".getBytes(), 1234L, Type.Delete);
+ KeyValue kv8 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L, Type.Delete);
Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE
+ kv6.getFamilyLength() + kv6.getQualifierLength() + KeyValue.TIMESTAMP_SIZE,
CellUtil.findCommonPrefixInFlatKey(kv6, kv8, true, true));
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
index e5546f6cb1..abca0d7425 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java
@@ -297,10 +297,10 @@ public class TestChoreService {
}
};
- assertEquals("Name construction failed", chore1.getName(), NAME);
- assertEquals("Period construction failed", chore1.getPeriod(), PERIOD);
- assertEquals("Initial Delay construction failed", chore1.getInitialDelay(), VALID_DELAY);
- assertEquals("TimeUnit construction failed", chore1.getTimeUnit(), UNIT);
+ assertEquals("Name construction failed", NAME, chore1.getName());
+ assertEquals("Period construction failed", PERIOD, chore1.getPeriod());
+ assertEquals("Initial Delay construction failed", VALID_DELAY, chore1.getInitialDelay());
+ assertEquals("TimeUnit construction failed", UNIT, chore1.getTimeUnit());
ScheduledChore invalidDelayChore =
new ScheduledChore(NAME, new SampleStopper(), PERIOD, INVALID_DELAY, UNIT) {
@@ -475,7 +475,7 @@ public class TestChoreService {
Thread.sleep(chorePeriod * 10);
assertEquals("Chores are missing their start time. Should expand core pool size",
service.getNumberOfScheduledChores(), service.getCorePoolSize());
- assertEquals(service.getNumberOfChoresMissingStartTime(), 5);
+ assertEquals(5, service.getNumberOfChoresMissingStartTime());
// Now we begin to cancel the chores that caused an increase in the core thread pool of the
// ChoreService. These cancellations should cause a decrease in the core thread pool.
@@ -483,31 +483,31 @@ public class TestChoreService {
Thread.sleep(chorePeriod * 10);
assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
service.getCorePoolSize());
- assertEquals(service.getNumberOfChoresMissingStartTime(), 4);
+ assertEquals(4, service.getNumberOfChoresMissingStartTime());
slowChore4.cancel();
Thread.sleep(chorePeriod * 10);
assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
service.getCorePoolSize());
- assertEquals(service.getNumberOfChoresMissingStartTime(), 3);
+ assertEquals(3, service.getNumberOfChoresMissingStartTime());
slowChore3.cancel();
Thread.sleep(chorePeriod * 10);
assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
service.getCorePoolSize());
- assertEquals(service.getNumberOfChoresMissingStartTime(), 2);
+ assertEquals(2, service.getNumberOfChoresMissingStartTime());
slowChore2.cancel();
Thread.sleep(chorePeriod * 10);
assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
service.getCorePoolSize());
- assertEquals(service.getNumberOfChoresMissingStartTime(), 1);
+ assertEquals(1, service.getNumberOfChoresMissingStartTime());
slowChore1.cancel();
Thread.sleep(chorePeriod * 10);
assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()),
service.getCorePoolSize());
- assertEquals(service.getNumberOfChoresMissingStartTime(), 0);
+ assertEquals(0, service.getNumberOfChoresMissingStartTime());
} finally {
shutdownService(service);
}
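JUnit's assertEquals takes (expected, actual) in that order. Swapping the arguments does not change whether the assertion passes, but a failure message then reads backwards, e.g. reporting expected:<4> but was:<5> when five chores were expected. Reordering, as done throughout these tests, keeps the diagnostics truthful:

  // a failure now reports expected:<5> but was:<...>, matching the test's intent
  assertEquals(5, service.getNumberOfChoresMissingStartTime());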
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
index 2a8d1a29b0..1aa052b323 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java
@@ -191,10 +191,9 @@ public class TestHBaseConfiguration {
}
// Instantiate Hadoop CredentialProviderFactory
try {
- hadoopCredProviderFactory = hadoopCredProviderFactoryClz.newInstance();
- } catch (InstantiationException e) {
- return false;
- } catch (IllegalAccessException e) {
+ hadoopCredProviderFactory =
+ hadoopCredProviderFactoryClz.getDeclaredConstructor().newInstance();
+ } catch (Exception e) {
return false;
}
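Class.newInstance() is discouraged (and deprecated as of Java 9) because it propagates anything the constructor throws, checked exceptions included, without wrapping. getDeclaredConstructor().newInstance() instead wraps constructor failures in InvocationTargetException, at the cost of a wider set of declared exceptions, which is why the two narrow catch blocks collapse into one. A sketch of the replacement pattern (Factory is a placeholder type):

  Factory f;
  try {
    f = factoryClass.getDeclaredConstructor().newInstance();
  } catch (Exception e) { // ReflectiveOperationException would also cover the checked cases
    return false;
  }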
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
index 3baf729ded..7d3aa0e87e 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
@@ -640,7 +640,7 @@ public class TestKeyValue extends TestCase {
assertTrue(kvA2.equals(deSerKV2));
}
- private class MockKeyValue implements Cell {
+ private static class MockKeyValue implements Cell {
private final KeyValue kv;
public MockKeyValue(KeyValue kv) {
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
index 6c18dc0689..802376580d 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java
@@ -67,7 +67,7 @@ public class TestKeyValueCodec {
Codec.Encoder encoder = kvc.getEncoder(dos);
final KeyValue kv =
new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
- final long length = kv.getLength() + Bytes.SIZEOF_INT;
+ final long length = (long) kv.getLength() + Bytes.SIZEOF_INT;
encoder.write(kv);
encoder.flush();
dos.close();
@@ -97,7 +97,7 @@ public class TestKeyValueCodec {
new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2"));
final KeyValue kv3 =
new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3"));
- final long length = kv1.getLength() + Bytes.SIZEOF_INT;
+ final long length = (long) kv1.getLength() + Bytes.SIZEOF_INT;
encoder.write(kv1);
encoder.write(kv2);
encoder.write(kv3);
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
index 95f8ba1f35..9b444f5718 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java
@@ -131,8 +131,8 @@ public class TestCipherProvider {
Cipher a = Encryption.getCipher(conf, "TEST");
assertNotNull(a);
assertTrue(a.getProvider() instanceof MyCipherProvider);
- assertEquals(a.getName(), "TEST");
- assertEquals(a.getKeyLength(), 0);
+ assertEquals("TEST", a.getName());
+ assertEquals(0, a.getKeyLength());
}
@Test
@@ -147,7 +147,7 @@ public class TestCipherProvider {
assertNotNull(a);
assertTrue(a.getProvider() instanceof DefaultCipherProvider);
assertEquals(a.getName(), algorithm);
- assertEquals(a.getKeyLength(), AES.KEY_LENGTH);
+ assertEquals(AES.KEY_LENGTH, a.getKeyLength());
}
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
index 9c98272ad8..fcb0b51384 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java
@@ -44,9 +44,9 @@ public class TestKeyProvider {
Key key = provider.getKey("foo");
assertNotNull("Test provider did not return a key as expected", key);
- assertEquals("Test provider did not create a key for AES", key.getAlgorithm(), "AES");
- assertEquals("Test provider did not create a key of adequate length",
- key.getEncoded().length, AES.KEY_LENGTH);
+ assertEquals("Test provider did not create a key for AES", "AES", key.getAlgorithm());
+ assertEquals("Test provider did not create a key of adequate length", AES.KEY_LENGTH,
+ key.getEncoded().length);
}
}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
index 9e381033a2..bc6edb81bd 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.io.FileOutputStream;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.security.Key;
import java.security.KeyStore;
import java.security.MessageDigest;
@@ -51,7 +52,7 @@ public class TestKeyStoreKeyProvider {
@BeforeClass
public static void setUp() throws Exception {
- KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes());
+ KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes(StandardCharsets.UTF_8));
// Create a JKECS store containing a test secret key
KeyStore store = KeyStore.getInstance("JCEKS");
store.load(null, PASSWORD.toCharArray());
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
index 65260ea6da..55cad54ff5 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java
@@ -53,8 +53,8 @@ public class TestAES {
public void testAESAlgorithm() throws Exception {
Configuration conf = HBaseConfiguration.create();
Cipher aes = Encryption.getCipher(conf, "AES");
- assertEquals(aes.getKeyLength(), AES.KEY_LENGTH);
- assertEquals(aes.getIvLength(), AES.IV_LENGTH);
+ assertEquals(AES.KEY_LENGTH, aes.getKeyLength());
+ assertEquals(AES.IV_LENGTH, aes.getIvLength());
Encryptor e = aes.getEncryptor();
e.setKey(new SecretKeySpec(Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c"), "AES"));
e.setIv(Bytes.fromHex("f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff"));
@@ -89,8 +89,7 @@ public class TestAES {
DefaultCipherProvider.getInstance().setConf(conf);
AES aes = new AES(DefaultCipherProvider.getInstance());
- assertEquals("AES did not find alternate RNG", aes.getRNG().getAlgorithm(),
- "TestRNG");
+ assertEquals("AES did not find alternate RNG", "TestRNG", aes.getRNG().getAlgorithm());
}
static class TestProvider extends Provider {
@@ -98,6 +97,7 @@ public class TestAES {
public TestProvider() {
super("TEST", 1.0, "Test provider");
AccessController.doPrivileged(new PrivilegedAction