diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java index 3ca55fcbad..10637d68a8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java @@ -172,7 +172,7 @@ public class ClusterStatus extends VersionedWritable { int count = 0; if (liveServers != null && !liveServers.isEmpty()) { for (Map.Entry e: this.liveServers.entrySet()) { - count += e.getValue().getNumberOfRequests(); + count = (int) (count + e.getValue().getNumberOfRequests()); } } return count; @@ -188,6 +188,7 @@ public class ClusterStatus extends VersionedWritable { /** * @see java.lang.Object#equals(java.lang.Object) */ + @Override public boolean equals(Object o) { if (this == o) { return true; @@ -208,6 +209,7 @@ public class ClusterStatus extends VersionedWritable { /** * @see java.lang.Object#hashCode() */ + @Override public int hashCode() { return VERSION + hbaseVersion.hashCode() + this.liveServers.hashCode() + this.deadServers.hashCode() + this.master.hashCode() + @@ -215,6 +217,7 @@ public class ClusterStatus extends VersionedWritable { } /** @return the object version number */ + @Override public byte getVersion() { return VERSION; } @@ -322,6 +325,7 @@ public class ClusterStatus extends VersionedWritable { return balancerOn; } + @Override public String toString() { StringBuilder sb = new StringBuilder(1024); sb.append("Master: " + master); @@ -440,7 +444,7 @@ public class ClusterStatus extends VersionedWritable { public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) { Map servers = null; - if (proto.getLiveServersList() != null) { + if (!proto.getLiveServersList().isEmpty()) { servers = new HashMap(proto.getLiveServersList().size()); for (LiveServerInfo lsi : proto.getLiveServersList()) { servers.put(ProtobufUtil.toServerName( @@ -449,7 +453,7 @@ public class ClusterStatus extends VersionedWritable { } Collection deadServers = null; - if (proto.getDeadServersList() != null) { + if (!proto.getDeadServersList().isEmpty()) { deadServers = new ArrayList(proto.getDeadServersList().size()); for (HBaseProtos.ServerName sn : proto.getDeadServersList()) { deadServers.add(ProtobufUtil.toServerName(sn)); @@ -457,7 +461,7 @@ public class ClusterStatus extends VersionedWritable { } Collection backupMasters = null; - if (proto.getBackupMastersList() != null) { + if (!proto.getBackupMastersList().isEmpty()) { backupMasters = new ArrayList(proto.getBackupMastersList().size()); for (HBaseProtos.ServerName sn : proto.getBackupMastersList()) { backupMasters.add(ProtobufUtil.toServerName(sn)); @@ -465,7 +469,7 @@ public class ClusterStatus extends VersionedWritable { } Set rit = null; - if (proto.getRegionsInTransitionList() != null) { + if (!proto.getRegionsInTransitionList().isEmpty()) { rit = new HashSet(proto.getRegionsInTransitionList().size()); for (RegionInTransition region : proto.getRegionsInTransitionList()) { RegionState value = RegionState.convert(region.getRegionState()); @@ -474,7 +478,7 @@ public class ClusterStatus extends VersionedWritable { } String[] masterCoprocessors = null; - if (proto.getMasterCoprocessorsList() != null) { + if (!proto.getMasterCoprocessorsList().isEmpty()) { final int numMasterCoprocessors = proto.getMasterCoprocessorsCount(); masterCoprocessors = new String[numMasterCoprocessors]; for (int i = 0; i < numMasterCoprocessors; i++) { diff --git 
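Note on the ClusterStatus.getRequestsCount() hunk above: assuming getNumberOfRequests() returns a long, the old count += form already truncated, because a compound assignment carries an implicit narrowing cast; the patch only makes that cast explicit. A minimal standalone sketch (not part of the patch) of the behaviour:

// Minimal sketch (not from the patch): compound assignment hides a narrowing cast.
public class NarrowingDemo {
  public static void main(String[] args) {
    long perServer = 3_000_000_000L;        // larger than Integer.MAX_VALUE
    int count = 0;
    count += perServer;                     // compiles: equivalent to count = (int) (count + perServer)
    int explicit = (int) (0 + perServer);   // same truncated value, but the cast is visible
    System.out.println(count + " == " + explicit);  // both print -1294967296
  }
}

The neighbouring getLiveServersList()/getDeadServersList() changes lean on the fact that protobuf-generated repeated-field getters never return null, so the meaningful guard is isEmpty(), not a null check.
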
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java index 9292633704..560b74b2b6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java @@ -1284,6 +1284,7 @@ public class HColumnDescriptor implements WritableComparable * @deprecated Writables are going away. Use pb {@link #parseFrom(byte[])} instead. */ @Deprecated + @Override public void readFields(DataInput in) throws IOException { int version = in.readByte(); if (version < 6) { @@ -1367,6 +1368,7 @@ public class HColumnDescriptor implements WritableComparable * @deprecated Writables are going away. Use {@link #toByteArray()} instead. */ @Deprecated + @Override public void write(DataOutput out) throws IOException { out.writeByte(COLUMN_DESCRIPTOR_VERSION); Bytes.writeByteArray(out, this.name); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java index 09ceeb9ec4..8d93655041 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java @@ -235,7 +235,7 @@ public class HRegionInfo implements Comparable { private void setHashCode() { int result = Arrays.hashCode(this.regionName); - result ^= this.regionId; + result = (int) (result ^ this.regionId); result ^= Arrays.hashCode(this.startKey); result ^= Arrays.hashCode(this.endKey); result ^= Boolean.valueOf(this.offLine).hashCode(); @@ -996,15 +996,6 @@ public class HRegionInfo implements Comparable { } /** - * Convert a HRegionInfo to the protobuf RegionInfo - * - * @return the converted RegionInfo - */ - RegionInfo convert() { - return convert(this); - } - - /** * Convert a HRegionInfo to a RegionInfo * * @param info the HRegionInfo to convert @@ -1070,7 +1061,7 @@ public class HRegionInfo implements Comparable { * @see #parseFrom(byte[]) */ public byte [] toByteArray() { - byte [] bytes = convert().toByteArray(); + byte [] bytes = convert(this).toByteArray(); return ProtobufUtil.prependPBMagic(bytes); } @@ -1148,7 +1139,7 @@ public class HRegionInfo implements Comparable { * @see #toByteArray() */ public byte [] toDelimitedByteArray() throws IOException { - return ProtobufUtil.toDelimitedByteArray(convert()); + return ProtobufUtil.toDelimitedByteArray(convert(this)); } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java index 492633c188..b6b8bd513b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public @InterfaceStability.Evolving public class InvalidFamilyOperationException extends DoNotRetryIOException { - private static final long serialVersionUID = 1L << 22 - 1L; + private static final long serialVersionUID = (1L << 22) - 1L; /** default constructor */ public InvalidFamilyOperationException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java index 
a85b1641c6..567f30f832 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java @@ -29,7 +29,7 @@ import java.io.IOException; @InterfaceAudience.Public @InterfaceStability.Stable public class MasterNotRunningException extends IOException { - private static final long serialVersionUID = 1L << 23 - 1L; + private static final long serialVersionUID = (1L << 23) - 1L; /** default constructor */ public MasterNotRunningException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java index 1523ff632c..448c8a9c99 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java @@ -31,7 +31,7 @@ import java.io.IOException; @InterfaceAudience.Public @InterfaceStability.Stable public class NotServingRegionException extends IOException { - private static final long serialVersionUID = 1L << 17 - 1L; + private static final long serialVersionUID = (1L << 17) - 1L; /** default constructor */ public NotServingRegionException() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java index 211de17f24..34836de17e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionLocations.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase; import java.util.Collection; +import java.util.Objects; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.client.RegionReplicaUtil; @@ -214,7 +215,7 @@ public class RegionLocations { HRegionLocation selectedLoc = selectRegionLocation(thisLoc, otherLoc, true, false); - if (selectedLoc != thisLoc) { + if (!Objects.equals(selectedLoc, thisLoc)) { if (newLocations == null) { newLocations = new HRegionLocation[max]; System.arraycopy(locations, 0, newLocations, 0, i); @@ -277,7 +278,7 @@ public class RegionLocations { HRegionLocation selectedLoc = selectRegionLocation(oldLoc, location, checkForEquals, force); - if (selectedLoc == oldLoc) { + if (Objects.equals(selectedLoc, oldLoc)) { return this; } HRegionLocation[] newLocations = new HRegionLocation[Math.max(locations.length, replicaId +1)]; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java index 339f58806a..97e78c92ad 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java @@ -287,11 +287,27 @@ public class ServerName implements Comparable, Serializable { @Override public int compareTo(ServerName other) { - int compare = this.getHostname().compareToIgnoreCase(other.getHostname()); - if (compare != 0) return compare; + int compare; + if (other == null) { + return -1; + } + if (this.getHostname() == null) { + if (other.getHostname() != null) { + return 1; + } + } else { + if (other.getHostname() == null) { + return -1; + } + compare = this.getHostname().compareToIgnoreCase(other.getHostname()); + if (compare != 0) { + return compare; + } + } compare = this.getPort() - other.getPort(); - if (compare != 0) return compare; - + if 
(compare != 0) { + return compare; + } return Long.compare(this.getStartcode(), other.getStartcode()); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java index 740eb901f9..1a394979eb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableExistsException.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.TableName; @InterfaceAudience.Public @InterfaceStability.Stable public class TableExistsException extends DoNotRetryIOException { - private static final long serialVersionUID = 1L << 7 - 1L; + private static final long serialVersionUID = (1L << 7) - 1L; /** default constructor */ public TableExistsException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java index ea707bf317..9b11122006 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java @@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Stable public class TableNotDisabledException extends DoNotRetryIOException { - private static final long serialVersionUID = 1L << 19 - 1L; + private static final long serialVersionUID = (1L << 19) - 1L; /** default constructor */ public TableNotDisabledException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java index 7aebf33af4..773a64b2df 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java @@ -29,7 +29,7 @@ import java.io.IOException; @InterfaceAudience.Public @InterfaceStability.Evolving public class ZooKeeperConnectionException extends IOException { - private static final long serialVersionUID = 1L << 23 - 1L; + private static final long serialVersionUID = (1L << 23) - 1L; /** default constructor */ public ZooKeeperConnectionException() { super(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java index 6144570e08..de1b7fee94 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java @@ -52,6 +52,7 @@ public class Append extends Mutation { * A client that is not interested in the result can save network * bandwidth setting this to false. */ + @Override public Append setReturnResults(boolean returnResults) { super.setReturnResults(returnResults); return this; @@ -61,6 +62,7 @@ public class Append extends Mutation { * @return current setting for returnResults */ // This method makes public the superclasses's protected method. 
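The serialVersionUID hunks above (InvalidFamilyOperationException, MasterNotRunningException, NotServingRegionException, TableExistsException, TableNotDisabledException, ZooKeeperConnectionException, and NoServerForRegionException later in the patch) add parentheses because subtraction binds tighter than the shift operator: 1L << 22 - 1L evaluates as 1L << 21. A minimal sketch (not part of the patch) showing the two values:

// Minimal sketch (not from the patch): '-' binds tighter than '<<'.
public class ShiftPrecedence {
  public static void main(String[] args) {
    System.out.println(1L << 22 - 1L);      // 2097152, i.e. 1L << 21
    System.out.println((1L << 22) - 1L);    // 4194303, the intended all-ones constant
  }
}

Note that the parenthesized form is a different numeric value, which only matters if serialized instances of these exceptions ever cross versions.
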
+ @Override public boolean isReturnResults() { return super.isReturnResults(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java index 44a7656e57..42df848fb4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java @@ -103,6 +103,8 @@ import org.apache.htrace.Trace; *

*/ @InterfaceAudience.Private +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Synchronization on tasks in progress counter is intended") class AsyncProcess { private static final Log LOG = LogFactory.getLog(AsyncProcess.class); protected static final AtomicLong COUNTER = new AtomicLong(); @@ -380,7 +382,7 @@ class AsyncProcess { // we will do more retries in aggregate, but the user will be none the wiser. this.serverTrackerTimeout = 0; for (int i = 0; i < this.numTries; ++i) { - serverTrackerTimeout += ConnectionUtils.getPauseTime(this.pause, i); + serverTrackerTimeout = (int) (serverTrackerTimeout + ConnectionUtils.getPauseTime(this.pause, i)); } this.rpcCallerFactory = rpcCaller; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java index 3cbdfb3f7a..20490790e7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java @@ -108,6 +108,7 @@ class FastFailInterceptorContext extends this.tries = tries; } + @Override public void clear() { server = null; fInfo = null; @@ -118,10 +119,12 @@ class FastFailInterceptorContext extends tries = 0; } + @Override public FastFailInterceptorContext prepare(RetryingCallable callable) { return prepare(callable, 0); } + @Override public FastFailInterceptorContext prepare(RetryingCallable callable, int tries) { if (callable instanceof RegionServerCallable) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java index f8c34bd047..6e5ff884d4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java @@ -243,6 +243,7 @@ public class Get extends Query return this; } + @Override public Get setLoadColumnFamiliesOnDemand(boolean value) { return (Get) super.setLoadColumnFamiliesOnDemand(value); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 0925e38bc0..051a768e84 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -24,6 +24,7 @@ import com.google.protobuf.ServiceException; import java.io.IOException; import java.io.InterruptedIOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -2460,6 +2461,7 @@ public class HBaseAdmin implements Admin { * * @return true if region normalizer is enabled, false otherwise. 
*/ + @Override public boolean isNormalizerEnabled() throws IOException { return executeCallable(new MasterCallable(getConnection()) { @Override @@ -2478,6 +2480,7 @@ public class HBaseAdmin implements Admin { * * @return Previous normalizer value */ + @Override public boolean setNormalizerRunning(final boolean on) throws IOException { return executeCallable(new MasterCallable(getConnection()) { @Override @@ -2608,10 +2611,10 @@ public class HBaseAdmin implements Admin { public void mergeRegions(final byte[] nameOfRegionA, final byte[] nameOfRegionB, final boolean forcible) throws IOException { - final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ? - nameOfRegionA : HRegionInfo.encodeRegionName(nameOfRegionA).getBytes(); - final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ? - nameOfRegionB : HRegionInfo.encodeRegionName(nameOfRegionB).getBytes(); + final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ? nameOfRegionA : + HRegionInfo.encodeRegionName(nameOfRegionA).getBytes(StandardCharsets.UTF_8); + final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ? nameOfRegionB : + HRegionInfo.encodeRegionName(nameOfRegionB).getBytes(StandardCharsets.UTF_8); Pair pair = getRegion(nameOfRegionA); if (pair != null && pair.getFirst().getReplicaId() != HRegionInfo.DEFAULT_REPLICA_ID) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java index f37690ca70..3d3ad33589 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HConnectionKey.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HConstants; @@ -124,8 +125,7 @@ class HConnectionKey { for (String property : CONNECTION_PROPERTIES) { String thisValue = this.properties.get(property); String thatValue = that.properties.get(property); - //noinspection StringEquality - if (thisValue == thatValue) { + if (Objects.equals(thisValue, thatValue)) { continue; } if (thisValue == null || !thisValue.equals(thatValue)) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index e9531f32a4..5fb9e63e4b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -1808,6 +1808,7 @@ public class HTable implements HTableInterface, RegionLocator { return getKeysAndRegionsInRange(start, end, true).getFirst(); } + @Override public void setOperationTimeout(int operationTimeout) { this.operationTimeout = operationTimeout; if (mutator != null) { @@ -1816,6 +1817,7 @@ public class HTable implements HTableInterface, RegionLocator { multiAp.setOperationTimeout(operationTimeout); } + @Override public int getOperationTimeout() { return operationTimeout; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java index 7b2b1368e7..21e3ce67a1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java +++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTableMultiplexer.java @@ -468,7 +468,7 @@ public class HTableMultiplexer { } public long getTotalBufferedCount() { - return queue.size() + currentProcessingCount.get(); + return (long) queue.size() + currentProcessingCount.get(); } public AtomicAverageCounter getAverageLatencyCounter() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java index 502703bde1..d2193067b7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java @@ -308,6 +308,7 @@ public class HTablePool implements Closeable { *

* Note: this is a 'shutdown' of all the table pools. */ + @Override public void close() throws IOException { for (String tableName : tables.keySet()) { closeTablePool(tableName); @@ -524,6 +525,7 @@ public class HTablePool implements Closeable { * * @throws IOException */ + @Override public void close() throws IOException { checkState(); open = false; @@ -635,7 +637,8 @@ public class HTablePool implements Closeable { private void checkState() { if (!isOpen()) { - throw new IllegalStateException("Table=" + new String(table.getTableName()) + " already closed"); + throw new IllegalStateException("Table=" + table.getName().getNameAsString() + + " already closed"); } } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java index 48f2fd604e..76759b763c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java @@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.util.ClassSize; @InterfaceAudience.Public @InterfaceStability.Stable public class Increment extends Mutation implements Comparable { - private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + ClassSize.TIMERANGE; + private static final long HEAP_OVERHEAD = (long) ClassSize.REFERENCE + ClassSize.TIMERANGE; private TimeRange tr = new TimeRange(); /** @@ -164,6 +164,7 @@ public class Increment extends Mutation implements Comparable { * client that is not interested in the result can save network bandwidth setting this * to false. */ + @Override public Increment setReturnResults(boolean returnResults) { super.setReturnResults(returnResults); return this; @@ -173,6 +174,7 @@ public class Increment extends Mutation implements Comparable { * @return current setting for returnResults */ // This method makes public the superclasses's protected method. 
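Many hunks in this patch only add @Override. The annotation is legal both for true overrides and for methods that republish a protected superclass method as public (as the comment below notes for Increment.isReturnResults), and it turns an accidental signature mismatch into a compile error rather than a silent overload. A minimal sketch with hypothetical stand-in classes (not the real HBase types):

// Minimal sketch (not from the patch): hypothetical stand-ins, not the real HBase classes.
class BaseMutation {
  protected boolean isReturnResults() { return true; }
}

class IncrementLike extends BaseMutation {
  @Override
  public boolean isReturnResults() {            // widening protected -> public is a valid override
    return super.isReturnResults();
  }
  // @Override
  // public boolean isReturnResult() { ... }     // would fail to compile: no such method in a supertype
}
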
+ @Override public boolean isReturnResults() { return super.isReturnResults(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java index 126b1175a8..b9ebac3258 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java @@ -27,7 +27,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public @InterfaceStability.Stable public class NoServerForRegionException extends DoNotRetryRegionException { - private static final long serialVersionUID = 1L << 11 - 1L; + private static final long serialVersionUID = (1L << 11) - 1L; /** default constructor */ public NoServerForRegionException() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java index 7ac4546f64..64b44a9a90 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/PerClientRandomNonceGenerator.java @@ -38,10 +38,12 @@ public class PerClientRandomNonceGenerator implements NonceGenerator { this.clientId = (((long)Arrays.hashCode(clientIdBase)) << 32) + rdm.nextInt(); } + @Override public long getNonceGroup() { return this.clientId; } + @Override public long newNonce() { long result = HConstants.NO_NONCE; do { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java index d7aa73900d..789e2e1447 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegistryFactory.java @@ -38,7 +38,7 @@ class RegistryFactory { ZooKeeperRegistry.class.getName()); Registry registry = null; try { - registry = (Registry)Class.forName(registryClass).newInstance(); + registry = (Registry)Class.forName(registryClass).getDeclaredConstructor().newInstance(); } catch (Throwable t) { throw new IOException(t); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java index b03595a2d6..af24d5c0ca 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RpcRetryingCaller.java @@ -66,6 +66,7 @@ public class RpcRetryingCaller { private final long pauseForCQTBE; private final int retries; private final int rpcTimeout;// timeout for each rpc request + private final Object lock = new Object(); private final AtomicBoolean cancelled = new AtomicBoolean(false); private final RetryingCallerInterceptor interceptor; private final RetryingCallerInterceptorContext context; @@ -105,16 +106,16 @@ public class RpcRetryingCaller { private int getTimeout(int callTimeout){ int timeout = getRemainingTime(callTimeout); - if (timeout <= 0 || rpcTimeout > 0 && rpcTimeout < timeout){ + if (timeout <= 0 || (rpcTimeout > 0 && rpcTimeout < timeout)){ timeout = rpcTimeout; } return timeout; } public void cancel(){ - synchronized (cancelled){ + synchronized (lock){ cancelled.set(true); - 
cancelled.notifyAll(); + lock.notifyAll(); } } @@ -181,9 +182,9 @@ public class RpcRetryingCaller { } try { if (expectedSleep > 0) { - synchronized (cancelled) { + synchronized (lock) { if (cancelled.get()) return null; - cancelled.wait(expectedSleep); + lock.wait(expectedSleep); } } if (cancelled.get()) return null; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java index 1340602358..541b3d8f1d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java @@ -871,6 +871,7 @@ public class Scan extends Query { return allowPartialResults; } + @Override public Scan setLoadColumnFamiliesOnDemand(boolean value) { return (Scan) super.setLoadColumnFamiliesOnDemand(value); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java index 3ea3802d7e..5e1d16261a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hbase.util.Bytes; public class LongColumnInterpreter extends ColumnInterpreter { + @Override public Long getValue(byte[] colFamily, byte[] colQualifier, Cell kv) throws IOException { if (kv == null || kv.getValueLength() != Bytes.SIZEOF_LONG) @@ -49,7 +50,7 @@ public class LongColumnInterpreter extends ColumnInterpreter clazz = Class.forName(filter); + Object o = clazz.getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new ReplicationException("Configured WALEntryFilter " + filter + " could not be created. 
Failing add/update " + "peer operation.", e); @@ -783,12 +784,12 @@ public class ReplicationAdmin implements Closeable { * @see java.lang.Object#equals(java.lang.Object) */ private boolean compareForReplication(HTableDescriptor peerHtd, HTableDescriptor localHtd) { - if (peerHtd == localHtd) { - return true; - } if (peerHtd == null) { return false; } + if (peerHtd.equals(localHtd)) { + return true; + } boolean result = false; // Create a copy of peer HTD as we need to change its replication diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java index 1847b2ecd7..5fc97ba7b7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/security/SecurityCapability.java @@ -34,7 +34,7 @@ public enum SecurityCapability { CELL_AUTHORIZATION(3), CELL_VISIBILITY(4); - private int value; + private final int value; public int getValue() { return value; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java index d5c2613f5f..09fe7e1ed5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java @@ -50,6 +50,7 @@ public class BinaryComparator extends ByteArrayComparable { /** * @return The comparator serialized using pb */ + @Override public byte [] toByteArray() { ComparatorProtos.BinaryComparator.Builder builder = ComparatorProtos.BinaryComparator.newBuilder(); @@ -79,6 +80,7 @@ public class BinaryComparator extends ByteArrayComparable { * @return true if and only if the fields of the comparator that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(ByteArrayComparable other) { if (other == this) return true; if (!(other instanceof BinaryComparator)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java index c05eb8f42f..366b2f9c0b 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java @@ -52,6 +52,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable { /** * @return The comparator serialized using pb */ + @Override public byte [] toByteArray() { ComparatorProtos.BinaryPrefixComparator.Builder builder = ComparatorProtos.BinaryPrefixComparator.newBuilder(); @@ -81,6 +82,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable { * @return true if and only if the fields of the comparator that are serialized * are equal to the corresponding fields in other. Used for testing. 
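The HConnectionKey, RegionLocations, and ReplicationAdmin.compareForReplication hunks swap reference comparison for Objects.equals, which is null-safe value equality: true when both arguments are null, false when exactly one is, otherwise a.equals(b). This is only behaviour-preserving where equals() is identity-based or the two references are guaranteed to be the same instance. A minimal sketch (not part of the patch):

// Minimal sketch (not from the patch): Objects.equals is null-safe value equality.
import java.util.Objects;

public class EqualsDemo {
  public static void main(String[] args) {
    String a = new String("loc");
    String b = new String("loc");
    System.out.println(a == b);                     // false: distinct references
    System.out.println(Objects.equals(a, b));       // true: delegates to equals()
    System.out.println(Objects.equals(null, null)); // true
    System.out.println(Objects.equals(a, null));    // false, with no NullPointerException
  }
}
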
*/ + @Override boolean areSerializedFieldsEqual(ByteArrayComparable other) { if (other == this) return true; if (!(other instanceof BinaryPrefixComparator)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java index 0b7c52db9b..07af22f91a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java @@ -66,6 +66,7 @@ public class BitComparator extends ByteArrayComparable { /** * @return The comparator serialized using pb */ + @Override public byte [] toByteArray() { ComparatorProtos.BitComparator.Builder builder = ComparatorProtos.BitComparator.newBuilder(); @@ -99,6 +100,7 @@ public class BitComparator extends ByteArrayComparable { * @return true if and only if the fields of the comparator that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(ByteArrayComparable other) { if (other == this) return true; if (!(other instanceof BitComparator)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index 18f49f6d45..d2f6ec84d4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -84,6 +84,7 @@ public class ColumnCountGetFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.ColumnCountGetFilter.Builder builder = FilterProtos.ColumnCountGetFilter.newBuilder(); @@ -113,6 +114,7 @@ public class ColumnCountGetFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof ColumnCountGetFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index 6f297fbdc0..806863af07 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -174,6 +174,7 @@ public class ColumnPaginationFilter extends FilterBase /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.ColumnPaginationFilter.Builder builder = FilterProtos.ColumnPaginationFilter.newBuilder(); @@ -213,6 +214,7 @@ public class ColumnPaginationFilter extends FilterBase * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof ColumnPaginationFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index 6a9e6e9799..eb0be8d62c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -99,6 +99,7 @@ public class ColumnPrefixFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.ColumnPrefixFilter.Builder builder = FilterProtos.ColumnPrefixFilter.newBuilder(); @@ -128,6 +129,7 @@ public class ColumnPrefixFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof ColumnPrefixFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index 9e6c90f923..9006f87dfc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -143,8 +143,8 @@ public class ColumnRangeFilter extends FilterBase { int cmpMax = Bytes.compareTo(buffer, qualifierOffset, qualifierLength, this.maxColumn, 0, this.maxColumn.length); - if (this.maxColumnInclusive && cmpMax <= 0 || - !this.maxColumnInclusive && cmpMax < 0) { + if ((this.maxColumnInclusive && cmpMax <= 0) || + (!this.maxColumnInclusive && cmpMax < 0)) { return ReturnCode.INCLUDE; } @@ -177,6 +177,7 @@ public class ColumnRangeFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.ColumnRangeFilter.Builder builder = FilterProtos.ColumnRangeFilter.newBuilder(); @@ -211,6 +212,7 @@ public class ColumnRangeFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof ColumnRangeFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index 9987e23b42..b7595d5641 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -170,6 +170,7 @@ public abstract class CompareFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof CompareFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index 2843751db8..8582e38b1f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -217,6 +217,7 @@ public class DependentColumnFilter extends CompareFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.DependentColumnFilter.Builder builder = FilterProtos.DependentColumnFilter.newBuilder(); @@ -268,6 +269,7 @@ public class DependentColumnFilter extends CompareFilter { */ @edu.umd.cs.findbugs.annotations.SuppressWarnings( value="RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE") + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof DependentColumnFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index f9722d3d89..9aa078c4bb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -82,6 +82,7 @@ public class FamilyFilter extends CompareFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.FamilyFilter.Builder builder = FilterProtos.FamilyFilter.newBuilder(); @@ -121,6 +122,7 @@ public class FamilyFilter extends CompareFilter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof FamilyFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java index 3c6bcabd2b..812252cc7c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java @@ -142,6 +142,7 @@ public abstract class FilterBase extends Filter { * * {@inheritDoc} */ + @Override public Cell getNextCellHint(Cell currentKV) throws IOException { // Old filters based off of this class will override KeyValue getNextKeyHint(KeyValue). // Thus to maintain compatibility we need to call the old version. @@ -154,6 +155,7 @@ public abstract class FilterBase extends Filter { * * {@inheritDoc} */ + @Override public boolean isFamilyEssential(byte[] name) throws IOException { return true; } @@ -171,6 +173,7 @@ public abstract class FilterBase extends Filter { /** * Return filter's info for debugging and logging purpose. 
*/ + @Override public String toString() { return this.getClass().getSimpleName(); } @@ -178,6 +181,7 @@ public abstract class FilterBase extends Filter { /** * Return length 0 byte array for Filters that don't require special serialization */ + @Override public byte[] toByteArray() throws IOException { return new byte[0]; } @@ -189,6 +193,7 @@ public abstract class FilterBase extends Filter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter other) { return true; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index 0b39b56193..8345fcfdcd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -221,6 +221,7 @@ final public class FilterList extends Filter { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() throws IOException { FilterProtos.FilterList.Builder builder = FilterProtos.FilterList.newBuilder(); builder.setOperator(FilterProtos.FilterList.Operator.valueOf(operator.name())); @@ -262,6 +263,7 @@ final public class FilterList extends Filter { * @return true if and only if the fields of the filter that are serialized are equal to the * corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter other) { if (other == this) return true; if (!(other instanceof FilterList)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java index 441852740d..71370ec1f1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java @@ -54,6 +54,7 @@ final public class FilterWrapper extends Filter { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() throws IOException { FilterProtos.FilterWrapper.Builder builder = FilterProtos.FilterWrapper.newBuilder(); @@ -181,6 +182,7 @@ final public class FilterWrapper extends Filter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof FilterWrapper)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index dafb485957..d18a1f8aaf 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -42,6 +42,7 @@ public class FirstKeyOnlyFilter extends FilterBase { public FirstKeyOnlyFilter() { } + @Override public void reset() { foundKV = false; } @@ -84,6 +85,7 @@ public class FirstKeyOnlyFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.FirstKeyOnlyFilter.Builder builder = FilterProtos.FirstKeyOnlyFilter.newBuilder(); @@ -113,6 +115,7 @@ public class FirstKeyOnlyFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof FirstKeyOnlyFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index fc40982a1d..2f1c037f25 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -82,6 +82,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder = FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder(); @@ -118,6 +119,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof FirstKeyValueMatchingQualifiersFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index 88fc17badc..d93d2342b8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -258,6 +258,7 @@ public class FuzzyRowFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() { FilterProtos.FuzzyRowFilter.Builder builder = FilterProtos.FuzzyRowFilter.newBuilder(); for (Pair fuzzyData : fuzzyKeysData) { @@ -466,45 +467,55 @@ public class FuzzyRowFilter extends FilterBase { /** Abstracts directional comparisons based on scan direction. */ private enum Order { ASC { + @Override public boolean lt(int lhs, int rhs) { return lhs < rhs; } + @Override public boolean gt(int lhs, int rhs) { return lhs > rhs; } + @Override public byte inc(byte val) { // TODO: what about over/underflow? 
return (byte) (val + 1); } + @Override public boolean isMax(byte val) { return val == (byte) 0xff; } + @Override public byte min() { return 0; } }, DESC { + @Override public boolean lt(int lhs, int rhs) { return lhs > rhs; } + @Override public boolean gt(int lhs, int rhs) { return lhs < rhs; } + @Override public byte inc(byte val) { // TODO: what about over/underflow? return (byte) (val - 1); } + @Override public boolean isMax(byte val) { return val == 0; } + @Override public byte min() { return (byte) 0xFF; } @@ -627,6 +638,7 @@ public class FuzzyRowFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized are equal to the * corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof FuzzyRowFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index a16e48bcf1..671e596f05 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -65,6 +65,7 @@ public class InclusiveStopFilter extends FilterBase { return v; } + @Override public boolean filterRowKey(byte[] buffer, int offset, int length) { if (buffer == null) { //noinspection RedundantIfStatement @@ -81,6 +82,7 @@ public class InclusiveStopFilter extends FilterBase { return done; } + @Override public boolean filterAllRemaining() { return done; } @@ -95,6 +97,7 @@ public class InclusiveStopFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.InclusiveStopFilter.Builder builder = FilterProtos.InclusiveStopFilter.newBuilder(); @@ -124,6 +127,7 @@ public class InclusiveStopFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof InclusiveStopFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index cebb26ac88..3895b2a82a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -88,6 +88,7 @@ public class KeyOnlyFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.KeyOnlyFilter.Builder builder = FilterProtos.KeyOnlyFilter.newBuilder(); @@ -117,6 +118,7 @@ public class KeyOnlyFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
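The FuzzyRowFilter.Order hunk above adds @Override inside the ASC and DESC constant bodies; enum constants with bodies are anonymous subclasses of the enum, so they override its abstract methods exactly the way ordinary subclasses do. A minimal sketch (not part of the patch):

// Minimal sketch (not from the patch): constant bodies are anonymous subclasses of the enum.
enum Direction {
  ASC {
    @Override boolean lt(int lhs, int rhs) { return lhs < rhs; }
  },
  DESC {
    @Override boolean lt(int lhs, int rhs) { return lhs > rhs; }
  };

  abstract boolean lt(int lhs, int rhs);
}
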
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof KeyOnlyFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java index 7e9503c22b..fd21dbd465 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java @@ -141,6 +141,7 @@ public class MultiRowRangeFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() { FilterProtos.MultiRowRangeFilter.Builder builder = FilterProtos.MultiRowRangeFilter .newBuilder(); @@ -193,6 +194,7 @@ public class MultiRowRangeFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized are equal to the * corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index d3eb642cc7..96a0b389a2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -115,6 +115,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.MultipleColumnPrefixFilter.Builder builder = FilterProtos.MultipleColumnPrefixFilter.newBuilder(); @@ -152,6 +153,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof MultipleColumnPrefixFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java index a9b3c8e77e..d2f7f45c0f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java @@ -62,6 +62,7 @@ public class NullComparator extends ByteArrayComparable { /** * @return The comparator serialized using pb */ + @Override public byte [] toByteArray() { ComparatorProtos.NullComparator.Builder builder = ComparatorProtos.NullComparator.newBuilder(); @@ -90,6 +91,7 @@ public class NullComparator extends ByteArrayComparable { * @return true if and only if the fields of the comparator that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(ByteArrayComparable other) { if (other == this) return true; if (!(other instanceof NullComparator)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index ee94d4c672..9692f2287e 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -24,11 +24,11 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hadoop.hbase.filter.Filter.ReturnCode; import org.apache.hadoop.hbase.protobuf.generated.FilterProtos; import java.io.IOException; import java.util.ArrayList; + /** * Implementation of Filter interface that limits results to a specific page * size. It terminates scanning once the number of filter-passed rows is > @@ -72,15 +72,18 @@ public class PageFilter extends FilterBase { return v; } + @Override public boolean filterAllRemaining() { return this.rowsAccepted >= this.pageSize; } + @Override public boolean filterRow() { this.rowsAccepted++; return this.rowsAccepted > this.pageSize; } + @Override public boolean hasFilterRow() { return true; } @@ -95,6 +98,7 @@ public class PageFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.PageFilter.Builder builder = FilterProtos.PageFilter.newBuilder(); @@ -124,6 +128,7 @@ public class PageFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof PageFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java index 4ba675b863..cdb611c86f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java @@ -30,6 +30,7 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.ByteBuffer; import java.nio.charset.CharacterCodingException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.EmptyStackException; @@ -263,7 +264,7 @@ public class ParseFilter { e.printStackTrace(); } throw new IllegalArgumentException("Incorrect filter string " + - new String(filterStringAsByteArray)); + new String(filterStringAsByteArray, StandardCharsets.UTF_8)); } /** @@ -811,9 +812,9 @@ public class ParseFilter { else if (Bytes.equals(comparatorType, ParseConstants.binaryPrefixType)) return new BinaryPrefixComparator(comparatorValue); else if (Bytes.equals(comparatorType, ParseConstants.regexStringType)) - return new RegexStringComparator(new String(comparatorValue)); + return new RegexStringComparator(new String(comparatorValue, StandardCharsets.UTF_8)); else if (Bytes.equals(comparatorType, ParseConstants.substringType)) - return new SubstringComparator(new String(comparatorValue)); + return new SubstringComparator(new String(comparatorValue, StandardCharsets.UTF_8)); else throw new IllegalArgumentException("Incorrect comparatorType"); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index 8030ff60ca..a3fc440023 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -50,6 +50,7 @@ public class PrefixFilter extends FilterBase { return prefix; } + @Override public boolean filterRowKey(byte[] buffer, int offset, int length) { if (buffer == null || this.prefix == null) return true; @@ -80,14 +81,17 @@ public class PrefixFilter extends FilterBase { return v; } + @Override public boolean filterRow() { return filterRow; } + @Override public void reset() { filterRow = true; } + @Override public boolean filterAllRemaining() { return passedPrefix; } @@ -102,6 +106,7 @@ public class PrefixFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.PrefixFilter.Builder builder = FilterProtos.PrefixFilter.newBuilder(); @@ -131,6 +136,7 @@ public class PrefixFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
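The ParseFilter hunks above, like the HBaseAdmin.mergeRegions change earlier, pin byte[]-to-String conversions to StandardCharsets.UTF_8. The no-argument String constructor and getBytes() use the platform default charset, so results can differ between JVMs; the Charset-object overloads are deterministic and, unlike the String-name overloads, throw no checked UnsupportedEncodingException. A minimal sketch (not part of the patch):

// Minimal sketch (not from the patch): pin the charset instead of relying on the platform default.
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class CharsetDemo {
  public static void main(String[] args) {
    byte[] raw = "région".getBytes(StandardCharsets.UTF_8);  // always UTF-8, on every JVM
    String back = new String(raw, StandardCharsets.UTF_8);
    System.out.println(back + " " + Arrays.toString(raw));
    // "région".getBytes() and new String(raw) would depend on the running JVM's default charset
  }
}
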
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof PrefixFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index bf3a5f997b..bf503c2b05 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -81,6 +81,7 @@ public class QualifierFilter extends CompareFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.QualifierFilter.Builder builder = FilterProtos.QualifierFilter.newBuilder(); @@ -120,6 +121,7 @@ public class QualifierFilter extends CompareFilter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof QualifierFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index 243923f0e8..f6d091ffc5 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -90,7 +90,8 @@ public class RandomRowFilter extends FilterBase { public boolean filterRow() { return filterOutRow; } - + + @Override public boolean hasFilterRow() { return true; } @@ -118,6 +119,7 @@ public class RandomRowFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.RandomRowFilter.Builder builder = FilterProtos.RandomRowFilter.newBuilder(); @@ -147,6 +149,7 @@ public class RandomRowFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof RandomRowFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 23a1e5d268..ee32a927c1 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -97,6 +97,7 @@ public class RowFilter extends CompareFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.RowFilter.Builder builder = FilterProtos.RowFilter.newBuilder(); @@ -136,6 +137,7 @@ public class RowFilter extends CompareFilter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof RowFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index 5c8668b28b..7dc0387046 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -97,6 +97,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { } // We cleaned result row in FilterRow to be consistent with scanning process. + @Override public boolean hasFilterRow() { return true; } @@ -132,6 +133,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.SingleColumnValueExcludeFilter.Builder builder = FilterProtos.SingleColumnValueExcludeFilter.newBuilder(); @@ -175,6 +177,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof SingleColumnValueExcludeFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index 7dad1a4b10..0cf3e2f47c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -216,16 +216,19 @@ public class SingleColumnValueFilter extends FilterBase { } } + @Override public boolean filterRow() { // If column was found, return false if it was matched, true if it was not // If column not found, return true if we filter if missing, false if not return this.foundColumn? !this.matchedColumn: this.filterIfMissing; } + @Override public boolean hasFilterRow() { return true; } + @Override public void reset() { foundColumn = false; matchedColumn = false; @@ -325,6 +328,7 @@ public class SingleColumnValueFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { return convert().toByteArray(); } @@ -364,6 +368,7 @@ public class SingleColumnValueFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof SingleColumnValueFilter)) return false; @@ -382,6 +387,7 @@ public class SingleColumnValueFilter extends FilterBase { * column in whole scan. If filterIfMissing == false, all families are essential, * because of possibility of skipping the rows without any data in filtered CF. 
*/ + @Override public boolean isFamilyEssential(byte[] name) { return !this.filterIfMissing || Bytes.equals(name, this.columnFamily); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index dd06995c7c..9bc18a45b4 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -87,10 +87,12 @@ public class SkipFilter extends FilterBase { return filter.transformCell(v); } + @Override public boolean filterRow() { return filterRow; } + @Override public boolean hasFilterRow() { return true; } @@ -98,6 +100,7 @@ public class SkipFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() throws IOException { FilterProtos.SkipFilter.Builder builder = FilterProtos.SkipFilter.newBuilder(); @@ -131,6 +134,7 @@ public class SkipFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof SkipFilter)) return false; @@ -139,6 +143,7 @@ public class SkipFilter extends FilterBase { return getFilter().areSerializedFieldsEqual(other.getFilter()); } + @Override public boolean isFamilyEssential(byte[] name) throws IOException { return filter.isFamilyEssential(name); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java index 1f0043c0c5..6c872f37f9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java @@ -71,6 +71,7 @@ public class SubstringComparator extends ByteArrayComparable { /** * @return The comparator serialized using pb */ + @Override public byte [] toByteArray() { ComparatorProtos.SubstringComparator.Builder builder = ComparatorProtos.SubstringComparator.newBuilder(); @@ -100,6 +101,7 @@ public class SubstringComparator extends ByteArrayComparable { * @return true if and only if the fields of the comparator that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(ByteArrayComparable other) { if (other == this) return true; if (!(other instanceof SubstringComparator)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index be5a0f6fe9..f28560bfd6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -124,6 +124,7 @@ public class TimestampsFilter extends FilterBase { * * @throws IOException This will never happen. 
*/ + @Override public Cell getNextCellHint(Cell currentCell) throws IOException { if (!canHint) { return null; @@ -168,6 +169,7 @@ public class TimestampsFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() { FilterProtos.TimestampsFilter.Builder builder = FilterProtos.TimestampsFilter.newBuilder(); @@ -199,6 +201,7 @@ public class TimestampsFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof TimestampsFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 5a46d7a11e..952d64ec86 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -77,6 +77,7 @@ public class ValueFilter extends CompareFilter { /** * @return The filter serialized using pb */ + @Override public byte [] toByteArray() { FilterProtos.ValueFilter.Builder builder = FilterProtos.ValueFilter.newBuilder(); @@ -116,6 +117,7 @@ public class ValueFilter extends CompareFilter { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. */ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof ValueFilter)) return false; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index 7263e1bfcd..1cefe46e90 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -52,6 +52,7 @@ public class WhileMatchFilter extends FilterBase { return filter; } + @Override public void reset() throws IOException { this.filter.reset(); } @@ -99,6 +100,7 @@ public class WhileMatchFilter extends FilterBase { /** * @return The filter serialized using pb */ + @Override public byte[] toByteArray() throws IOException { FilterProtos.WhileMatchFilter.Builder builder = FilterProtos.WhileMatchFilter.newBuilder(); @@ -132,6 +134,7 @@ public class WhileMatchFilter extends FilterBase { * @return true if and only if the fields of the filter that are serialized * are equal to the corresponding fields in other. Used for testing. 
*/ + @Override boolean areSerializedFieldsEqual(Filter o) { if (o == this) return true; if (!(o instanceof WhileMatchFilter)) return false; @@ -140,6 +143,7 @@ public class WhileMatchFilter extends FilterBase { return getFilter().areSerializedFieldsEqual(other.getFilter()); } + @Override public boolean isFamilyEssential(byte[] name) throws IOException { return filter.isFamilyEssential(name); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java index caa19b8423..ebbf9e0304 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java @@ -242,7 +242,7 @@ public abstract class AbstractRpcClient implements RpcC return null; } try { - return (Codec) Class.forName(className).newInstance(); + return (Codec) Class.forName(className).getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new RuntimeException("Failed getting codec " + className, e); } @@ -270,7 +270,7 @@ public abstract class AbstractRpcClient implements RpcC return null; } try { - return (CompressionCodec) Class.forName(className).newInstance(); + return (CompressionCodec) Class.forName(className).getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new RuntimeException("Failed getting compressor " + className, e); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java index d27602e7de..4a83fdd999 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcClient.java @@ -67,6 +67,7 @@ public class BlockingRpcClient extends AbstractRpcClient * Creates a connection. Can be overridden by a subclass for testing. * @param remoteId - the ConnectionId to use for the connection creation. */ + @Override protected BlockingRpcConnection createConnection(ConnectionId remoteId) throws IOException { return new BlockingRpcConnection(this, remoteId); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java index 33fc880a8c..cf84c5a453 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java @@ -57,20 +57,49 @@ public class ConnectionId { } @Override - public boolean equals(Object obj) { - if (obj instanceof ConnectionId) { - ConnectionId id = (ConnectionId) obj; - return address.equals(id.address) && - ((ticket != null && ticket.equals(id.ticket)) || - (ticket == id.ticket)) && - this.serviceName == id.serviceName; - } - return false; + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((address == null) ? 0 : address.hashCode()); + result = prime * result + ((serviceName == null) ? 0 : serviceName.hashCode()); + result = prime * result + ((ticket == null) ? 0 : ticket.hashCode()); + return result; } - @Override // simply use the default Object#hashcode() ? 
- public int hashCode() { - return hashCode(ticket,serviceName,address); + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + ConnectionId other = (ConnectionId) obj; + if (address == null) { + if (other.address != null) { + return false; + } + } else if (!address.equals(other.address)) { + return false; + } + if (serviceName == null) { + if (other.serviceName != null) { + return false; + } + } else if (!serviceName.equals(other.serviceName)) { + return false; + } + if (ticket == null) { + if (other.ticket != null) { + return false; + } + } else if (!ticket.equals(other.ticket)) { + return false; + } + return true; } public static int hashCode(User ticket, String serviceName, InetSocketAddress address){ diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java index 5b8498db77..ab7c2a3d79 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ResponseConverter.java @@ -446,10 +446,9 @@ public final class ResponseConverter { public static Map getScanMetrics(ScanResponse response) { Map metricMap = new HashMap(); - if (response == null || !response.hasScanMetrics() || response.getScanMetrics() == null) { + if (response == null || !response.hasScanMetrics()) { return metricMap; } - ScanMetrics metrics = response.getScanMetrics(); int numberOfMetrics = metrics.getMetricsCount(); for (int i = 0; i < numberOfMetrics; i++) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java index 70e43561e0..7b6b5463e9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java @@ -67,6 +67,7 @@ public final class QuotaRetriever implements Closeable, Iterable } } + @Override public void close() throws IOException { if (this.table != null) { this.table.close(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java index a4a9720514..73a8f3090d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.classification.InterfaceStability; @InterfaceAudience.Public @InterfaceStability.Evolving public class RegionServerRunningException extends IOException { - private static final long serialVersionUID = 1L << 31 - 1L; + private static final long serialVersionUID = (1L << 31) - 1L; /** Default Constructor */ public RegionServerRunningException() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java index c2999ecf0c..6fefb367c9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java @@ 
-39,14 +39,12 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos; import org.apache.hadoop.hbase.replication.ReplicationPeer.PeerState; -import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKConfig; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp; import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.KeeperException.NoNodeException; /** * This class provides an implementation of the ReplicationPeers interface using Zookeeper. The @@ -80,14 +78,12 @@ public class ReplicationPeersZKImpl extends ReplicationStateZKBase implements Re // Map of peer clusters keyed by their id private Map peerClusters; private final ReplicationQueuesClient queuesClient; - private Abortable abortable; private static final Log LOG = LogFactory.getLog(ReplicationPeersZKImpl.class); public ReplicationPeersZKImpl(final ZooKeeperWatcher zk, final Configuration conf, final ReplicationQueuesClient queuesClient, Abortable abortable) { super(zk, conf, abortable); - this.abortable = abortable; this.peerClusters = new ConcurrentHashMap(); this.queuesClient = queuesClient; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java index f9f2d43cd0..8e4871db36 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/ReplicationTrackerZKImpl.java @@ -102,6 +102,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements * Called when a new node has been created. * @param path full path of the new node */ + @Override public void nodeCreated(String path) { refreshListIfRightPath(path); } @@ -110,6 +111,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements * Called when a node has been deleted * @param path full path of the deleted node */ + @Override public void nodeDeleted(String path) { if (stopper.isStopped()) { return; @@ -128,6 +130,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements * Called when an existing node has a child node added or removed. * @param path full path of the node whose children have changed */ + @Override public void nodeChildrenChanged(String path) { if (stopper.isStopped()) { return; @@ -159,6 +162,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements * Called when a node has been deleted * @param path full path of the deleted node */ + @Override public void nodeDeleted(String path) { List peers = refreshPeersList(path); if (peers == null) { @@ -177,6 +181,7 @@ public class ReplicationTrackerZKImpl extends ReplicationStateZKBase implements * Called when an existing node has a child node added or removed. 
* @param path full path of the node whose children have changed */ + @Override public void nodeChildrenChanged(String path) { List peers = refreshPeersList(path); if (peers == null) { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java index 54c1701f5c..b26dcac89d 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java @@ -18,6 +18,7 @@ */ package org.apache.hadoop.hbase.security; +import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.TreeMap; @@ -67,15 +68,15 @@ public class SaslUtil { } static String encodeIdentifier(byte[] identifier) { - return new String(Base64.encodeBase64(identifier)); + return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8); } static byte[] decodeIdentifier(String identifier) { - return Base64.decodeBase64(identifier.getBytes()); + return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8)); } static char[] encodePassword(byte[] password) { - return new String(Base64.encodeBase64(password)).toCharArray(); + return new String(Base64.encodeBase64(password), StandardCharsets.UTF_8).toCharArray(); } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java index 3a01ace763..c904eef1bd 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java @@ -48,7 +48,7 @@ public class Permission extends VersionedWritable { public enum Action { READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A'); - private byte code; + private final byte code; Action(char code) { this.code = (byte)code; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java index fd1a9d590f..b06acbdffc 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java @@ -132,6 +132,7 @@ public class VisibilityClient { BlockingRpcCallback rpcCallback = new BlockingRpcCallback(); + @Override public VisibilityLabelsResponse call(VisibilityLabelsService service) throws IOException { VisibilityLabelsRequest.Builder builder = VisibilityLabelsRequest.newBuilder(); @@ -217,6 +218,7 @@ public class VisibilityClient { BlockingRpcCallback rpcCallback = new BlockingRpcCallback(); + @Override public GetAuthsResponse call(VisibilityLabelsService service) throws IOException { GetAuthsRequest.Builder getAuthReqBuilder = GetAuthsRequest.newBuilder(); getAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user))); @@ -268,6 +270,7 @@ public class VisibilityClient { BlockingRpcCallback rpcCallback = new BlockingRpcCallback(); + @Override public ListLabelsResponse call(VisibilityLabelsService service) throws IOException { ListLabelsRequest.Builder listAuthLabelsReqBuilder = ListLabelsRequest.newBuilder(); if (regex != null) { @@ -332,6 +335,7 @@ public class VisibilityClient { BlockingRpcCallback rpcCallback = new BlockingRpcCallback(); + @Override public VisibilityLabelsResponse call(VisibilityLabelsService service) 
throws IOException { SetAuthsRequest.Builder setAuthReqBuilder = SetAuthsRequest.newBuilder(); setAuthReqBuilder.setUser(ByteStringer.wrap(Bytes.toBytes(user))); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java index 67aaffd877..e474b1e9e2 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java @@ -298,7 +298,7 @@ public class PoolMap implements Map { * the type of the resource */ @SuppressWarnings("serial") - public class ReusablePool extends ConcurrentLinkedQueue implements Pool { + public static class ReusablePool extends ConcurrentLinkedQueue implements Pool { private int maxSize; public ReusablePool(int maxSize) { @@ -342,7 +342,7 @@ public class PoolMap implements Map { * */ @SuppressWarnings("serial") - class RoundRobinPool extends CopyOnWriteArrayList implements Pool { + static class RoundRobinPool extends CopyOnWriteArrayList implements Pool { private int maxSize; private int nextResource = 0; diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java index 20791de453..8075a7aa1f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/EmptyWatcher.java @@ -30,5 +30,6 @@ public class EmptyWatcher implements Watcher { public static final EmptyWatcher instance = new EmptyWatcher(); private EmptyWatcher() {} + @Override public void process(WatchedEvent event) {} } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java index 226796aff7..6d1772d14a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java @@ -41,6 +41,7 @@ import java.io.PrintWriter; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; @@ -159,7 +160,7 @@ public class HQuorumPeer { } File myIdFile = new File(dataDir, "myid"); - PrintWriter w = new PrintWriter(myIdFile); + PrintWriter w = new PrintWriter(myIdFile, StandardCharsets.UTF_8.name()); w.println(myId); w.close(); } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java index cc0f5f27ad..62dc17d7f0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java @@ -43,6 +43,7 @@ import org.apache.zookeeper.KeeperException; public class ZKLeaderManager extends ZooKeeperListener { private static final Log LOG = LogFactory.getLog(ZKLeaderManager.class); + private final Object lock = new Object(); private final AtomicBoolean leaderExists = new AtomicBoolean(); private String leaderZNode; private byte[] nodeId; @@ -85,14 +86,14 @@ public class ZKLeaderManager extends ZooKeeperListener { private void handleLeaderChange() { try { - synchronized(leaderExists) { + synchronized(lock) { if 
(ZKUtil.watchAndCheckExists(watcher, leaderZNode)) { LOG.info("Found new leader for znode: "+leaderZNode); leaderExists.set(true); } else { LOG.info("Leader change, but no new leader found"); leaderExists.set(false); - leaderExists.notifyAll(); + lock.notifyAll(); } } } catch (KeeperException ke) { @@ -136,10 +137,10 @@ public class ZKLeaderManager extends ZooKeeperListener { } // wait for next chance - synchronized(leaderExists) { + synchronized(lock) { while (leaderExists.get() && !candidate.isStopped()) { try { - leaderExists.wait(); + lock.wait(); } catch (InterruptedException ie) { LOG.debug("Interrupted waiting on leader", ie); } @@ -153,7 +154,7 @@ public class ZKLeaderManager extends ZooKeeperListener { */ public void stepDownAsLeader() { try { - synchronized(leaderExists) { + synchronized(lock) { if (!leaderExists.get()) { return; } diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java index d874768e20..4f1d87c6b3 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java @@ -19,11 +19,14 @@ package org.apache.hadoop.hbase.zookeeper; import java.io.BufferedReader; +import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStreamReader; +import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.net.InetSocketAddress; import java.net.Socket; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; @@ -1954,9 +1957,11 @@ public class ZKUtil { socket.connect(sockAddr, timeout); socket.setSoTimeout(timeout); - PrintWriter out = new PrintWriter(socket.getOutputStream(), true); - BufferedReader in = new BufferedReader(new InputStreamReader( - socket.getInputStream())); + PrintWriter out = new PrintWriter(new BufferedWriter( + new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)), + true); + BufferedReader in = new BufferedReader( + new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8)); out.println("stat"); out.flush(); ArrayList res = new ArrayList(); diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java index 1966253a0f..a4f926098f 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHColumnDescriptor.java @@ -19,6 +19,9 @@ package org.apache.hadoop.hbase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.nio.charset.StandardCharsets; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.io.compress.Compression; @@ -77,7 +80,8 @@ public class TestHColumnDescriptor { public void testHColumnDescriptorShouldThrowIAEWhenFamiliyNameEmpty() throws Exception { try { - new HColumnDescriptor("".getBytes()); + new HColumnDescriptor("".getBytes(StandardCharsets.UTF_8)); + fail("Did not throw"); } catch (IllegalArgumentException e) { assertEquals("Family name can not be empty", e.getLocalizedMessage()); } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java index d126994b61..23d2946db1 100644 
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java @@ -100,7 +100,7 @@ public class TestHTableDescriptor { assertEquals(v, deserializedHtd.getMaxFileSize()); assertTrue(deserializedHtd.isReadOnly()); assertEquals(Durability.ASYNC_WAL, deserializedHtd.getDurability()); - assertEquals(deserializedHtd.getRegionReplication(), 2); + assertEquals(2, deserializedHtd.getRegionReplication()); } /** diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java index 0e0fbb024a..b19ba36974 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/TestInterfaceAudienceAnnotations.java @@ -62,7 +62,7 @@ public class TestInterfaceAudienceAnnotations { private static final Log LOG = LogFactory.getLog(TestInterfaceAudienceAnnotations.class); /** Selects classes with generated in their package name */ - class GeneratedClassFilter implements ClassFinder.ClassFilter { + static class GeneratedClassFilter implements ClassFinder.ClassFilter { @Override public boolean isCandidateClass(Class c) { return c.getPackage().getName().contains("generated"); @@ -181,7 +181,7 @@ public class TestInterfaceAudienceAnnotations { } /** Selects classes that are declared public */ - class PublicClassFilter implements ClassFinder.ClassFilter { + static class PublicClassFilter implements ClassFinder.ClassFilter { @Override public boolean isCandidateClass(Class c) { int mod = c.getModifiers(); @@ -190,7 +190,7 @@ public class TestInterfaceAudienceAnnotations { } /** Selects paths (jars and class dirs) only from the main code, not test classes */ - class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter { + static class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter { @Override public boolean isCandidatePath(String resourcePath, boolean isJar) { return !resourcePath.contains("test-classes") && @@ -207,7 +207,7 @@ public class TestInterfaceAudienceAnnotations { * - enclosing class is not an interface * - name starts with "__CLR" */ - class CloverInstrumentationFilter implements ClassFinder.ClassFilter { + static class CloverInstrumentationFilter implements ClassFinder.ClassFilter { @Override public boolean isCandidateClass(Class clazz) { boolean clover = false; diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java index 8c0b7df26d..e0d09a693f 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcess.java @@ -24,8 +24,10 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.io.InterruptedIOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; @@ -93,10 +95,10 @@ public class TestAsyncProcess { private final static Log LOG = LogFactory.getLog(TestAsyncProcess.class); private static final TableName DUMMY_TABLE = TableName.valueOf("DUMMY_TABLE"); - private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(); - private 
static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(); - private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(); - private static final byte[] FAILS = "FAILS".getBytes(); + private static final byte[] DUMMY_BYTES_1 = "DUMMY_BYTES_1".getBytes(StandardCharsets.UTF_8); + private static final byte[] DUMMY_BYTES_2 = "DUMMY_BYTES_2".getBytes(StandardCharsets.UTF_8); + private static final byte[] DUMMY_BYTES_3 = "DUMMY_BYTES_3".getBytes(StandardCharsets.UTF_8); + private static final byte[] FAILS = "FAILS".getBytes(StandardCharsets.UTF_8); private static final Configuration conf = new Configuration(); private static ServerName sn = ServerName.valueOf("s1:1,1"); @@ -353,7 +355,8 @@ public class TestAsyncProcess { return inc.getAndIncrement(); } } - class MyAsyncProcessWithReplicas extends MyAsyncProcess { + + static class MyAsyncProcessWithReplicas extends MyAsyncProcess { private Set failures = new TreeSet(new Bytes.ByteArrayComparator()); private long primarySleepMs = 0, replicaSleepMs = 0; private Map customPrimarySleepMs = new HashMap(); @@ -625,7 +628,13 @@ public class TestAsyncProcess { Random rn = new Random(); final long limit = 10 * 1024 * 1024; final int requestCount = 1 + (int) (rn.nextDouble() * 3); - long putsHeapSize = Math.abs(rn.nextLong()) % limit; + long n = rn.nextLong(); + if (n < 0) { + n = -n; + } else if (n == 0) { + n = 1; + } + long putsHeapSize = n % limit; long maxHeapSizePerRequest = putsHeapSize / requestCount; LOG.info("[testSubmitRandomSizeRequest] maxHeapSizePerRequest=" + maxHeapSizePerRequest + ", putsHeapSize=" + putsHeapSize); @@ -747,7 +756,7 @@ public class TestAsyncProcess { final AsyncRequestFuture ars = ap.submit(DUMMY_TABLE, puts, false, cb, false); Assert.assertTrue(puts.isEmpty()); ars.waitUntilDone(); - Assert.assertEquals(updateCalled.get(), 1); + Assert.assertEquals(1, updateCalled.get()); } @Test @@ -759,12 +768,12 @@ public class TestAsyncProcess { puts.add(createPut(1, true)); for (int i = 0; i != ap.maxConcurrentTasksPerRegion; ++i) { - ap.incTaskCounters(Arrays.asList(hri1.getRegionName()), sn); + ap.incTaskCounters(Collections.singletonList(hri1.getRegionName()), sn); } ap.submit(DUMMY_TABLE, puts, false, null, false); Assert.assertEquals(puts.size(), 1); - ap.decTaskCounters(Arrays.asList(hri1.getRegionName()), sn); + ap.decTaskCounters(Collections.singletonList(hri1.getRegionName()), sn); ap.submit(DUMMY_TABLE, puts, false, null, false); Assert.assertEquals(0, puts.size()); } @@ -945,7 +954,7 @@ public class TestAsyncProcess { final AsyncProcess ap = new MyAsyncProcess(createHConnection(), conf, false); for (int i = 0; i < 1000; i++) { - ap.incTaskCounters(Arrays.asList("dummy".getBytes()), sn); + ap.incTaskCounters(Collections.singletonList("dummy".getBytes(StandardCharsets.UTF_8)), sn); } final Thread myThread = Thread.currentThread(); @@ -976,7 +985,7 @@ public class TestAsyncProcess { public void run() { Threads.sleep(sleepTime); while (ap.tasksInProgress.get() > 0) { - ap.decTaskCounters(Arrays.asList("dummy".getBytes()), sn); + ap.decTaskCounters(Collections.singletonList("dummy".getBytes(StandardCharsets.UTF_8)), sn); } } }; @@ -1336,13 +1345,13 @@ public class TestAsyncProcess { } catch (RetriesExhaustedException expected) { } - Assert.assertEquals(res[0], success); - Assert.assertEquals(res[1], success); - Assert.assertEquals(res[2], success); - Assert.assertEquals(res[3], success); - Assert.assertEquals(res[4], failure); - Assert.assertEquals(res[5], success); - Assert.assertEquals(res[6], failure); + 
Assert.assertEquals(success, res[0]); + Assert.assertEquals(success, res[1]); + Assert.assertEquals(success, res[2]); + Assert.assertEquals(success, res[3]); + Assert.assertEquals(failure, res[4]); + Assert.assertEquals(success, res[5]); + Assert.assertEquals(failure, res[6]); } @Test public void testErrorsServers() throws IOException { @@ -1479,7 +1488,7 @@ public class TestAsyncProcess { ht.batch(gets, new Object[gets.size()]); - Assert.assertEquals(ap.nbActions.get(), NB_REGS); + Assert.assertEquals(NB_REGS, ap.nbActions.get()); Assert.assertEquals("1 multi response per server", 2, ap.nbMultiResponse.get()); Assert.assertEquals("1 thread per server", 2, con.nbThreads.get()); @@ -1487,7 +1496,7 @@ public class TestAsyncProcess { for (int i =0; i token = createTokenMock(); - when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes()); - when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes()); + when(token.getIdentifier()) + .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8)); + when(token.getPassword()) + .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8)); final NameCallback nameCallback = mock(NameCallback.class); final PasswordCallback passwordCallback = mock(PasswordCallback.class); @@ -120,8 +123,10 @@ public class TestHBaseSaslRpcClient { @Test public void testSaslClientCallbackHandlerWithException() { final Token token = createTokenMock(); - when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes()); - when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes()); + when(token.getIdentifier()) + .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8)); + when(token.getPassword()) + .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8)); final SaslClientCallbackHandler saslClCallbackHandler = new SaslClientCallbackHandler(token); try { saslClCallbackHandler.handle(new Callback[] { mock(TextOutputCallback.class) }); @@ -291,8 +296,10 @@ public class TestHBaseSaslRpcClient { throws IOException { Token token = createTokenMock(); if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(password)) { - when(token.getIdentifier()).thenReturn(DEFAULT_USER_NAME.getBytes()); - when(token.getPassword()).thenReturn(DEFAULT_USER_PASSWORD.getBytes()); + when(token.getIdentifier()) + .thenReturn(DEFAULT_USER_NAME.getBytes(StandardCharsets.UTF_8)); + when(token.getPassword()) + .thenReturn(DEFAULT_USER_PASSWORD.getBytes(StandardCharsets.UTF_8)); } return token; } diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java index 9990cd1afd..f5a7a340ba 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKUtil.java @@ -23,7 +23,6 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseConfiguration; -import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.testclassification.SmallTests; @@ -51,7 +50,7 @@ public class TestZKUtil { String node = "/hbase/testUnsecure"; ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false); List aclList = ZKUtil.createACL(watcher, node, false); - Assert.assertEquals(aclList.size(), 1); + Assert.assertEquals(1, aclList.size()); 
Assert.assertTrue(aclList.contains(Ids.OPEN_ACL_UNSAFE.iterator().next())); } @@ -62,7 +61,7 @@ public class TestZKUtil { String node = "/hbase/testSecuritySingleSuperuser"; ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false); List aclList = ZKUtil.createACL(watcher, node, true); - Assert.assertEquals(aclList.size(), 2); // 1+1, since ACL will be set for the creator by default + Assert.assertEquals(2, aclList.size()); // 1+1, since ACL will be set for the creator by default Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user1")))); Assert.assertTrue(aclList.contains(Ids.CREATOR_ALL_ACL.iterator().next())); } @@ -74,7 +73,7 @@ public class TestZKUtil { String node = "/hbase/testCreateACL"; ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false); List aclList = ZKUtil.createACL(watcher, node, true); - Assert.assertEquals(aclList.size(), 4); // 3+1, since ACL will be set for the creator by default + Assert.assertEquals(4, aclList.size()); // 3+1, since ACL will be set for the creator by default Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group1")))); Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group2")))); Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user1")))); @@ -90,13 +89,14 @@ public class TestZKUtil { String node = "/hbase/testCreateACL"; ZooKeeperWatcher watcher = new ZooKeeperWatcher(conf, node, null, false); List aclList = ZKUtil.createACL(watcher, node, true); - Assert.assertEquals(aclList.size(), 3); // 3, since service user the same as one of superuser + Assert.assertEquals(3, aclList.size()); // 3, since service user the same as one of superuser Assert.assertFalse(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "@group1")))); Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("auth", "")))); Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user5")))); Assert.assertTrue(aclList.contains(new ACL(Perms.ALL, new Id("sasl", "user6")))); } + @Test public void testInterruptedDuringAction() throws ZooKeeperConnectionException, IOException, KeeperException, InterruptedException { final RecoverableZooKeeper recoverableZk = Mockito.mock(RecoverableZooKeeper.class); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java index 338265dd42..701c6e1cb5 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/AsyncConsoleAppender.java @@ -40,6 +40,7 @@ public class AsyncConsoleAppender extends AsyncAppender { consoleAppender.setTarget(value); } + @Override public void activateOptions() { consoleAppender.activateOptions(); super.activateOptions(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java index 0290ded596..981fad85de 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java @@ -633,7 +633,7 @@ public final class CellUtil { if (cell instanceof KeyValue) { return ((KeyValue)cell).heapSizeWithoutTags(); } - return getSumOfCellKeyElementLengths(cell) + cell.getValueLength(); + return (long) getSumOfCellKeyElementLengths(cell) + cell.getValueLength(); } /********************* tags *************************************/ diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java index c330fa75cf..980f001f69 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java @@ -51,12 +51,13 @@ public class JitterScheduledThreadPoolExecutorImpl extends ScheduledThreadPoolEx this.spread = spread; } + @Override protected java.util.concurrent.RunnableScheduledFuture decorateTask( Runnable runnable, java.util.concurrent.RunnableScheduledFuture task) { return new JitteredRunnableScheduledFuture<>(task); } - + @Override protected java.util.concurrent.RunnableScheduledFuture decorateTask( Callable callable, java.util.concurrent.RunnableScheduledFuture task) { return new JitteredRunnableScheduledFuture<>(task); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java index 7670aea283..2b09faf10b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java @@ -197,9 +197,9 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, */ public static long getKeyValueDataStructureSize(int klength, int vlength, int tagsLength) { if (tagsLength == 0) { - return KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength; + return (long) KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + klength + vlength; } - return KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength; + return (long) KeyValue.KEYVALUE_WITH_TAGS_INFRASTRUCTURE_SIZE + klength + vlength + tagsLength; } /** @@ -213,7 +213,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, * @return the key data structure length */ public static long getKeyDataStructureSize(int rlength, int flength, int qlength) { - return KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength; + return (long) KeyValue.KEY_INFRASTRUCTURE_SIZE + rlength + flength + qlength; } /** @@ -2531,7 +2531,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, int length = kv.getLength(); out.writeInt(length); out.write(kv.getBuffer(), kv.getOffset(), length); - return length + Bytes.SIZEOF_INT; + return (long) length + Bytes.SIZEOF_INT; } /** @@ -2553,7 +2553,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, // This does same as DataOuput#writeInt (big-endian, etc.) out.write(Bytes.toBytes(length)); out.write(kv.getBuffer(), kv.getOffset(), length); - return length + Bytes.SIZEOF_INT; + return (long) length + Bytes.SIZEOF_INT; } /** @@ -2580,7 +2580,7 @@ public class KeyValue implements Cell, HeapSize, Cloneable, SettableSequenceId, // This does same as DataOuput#writeInt (big-endian, etc.) 
StreamUtils.writeInt(out, length); out.write(kv.getBuffer(), kv.getOffset(), length); - return length + Bytes.SIZEOF_INT; + return (long) length + Bytes.SIZEOF_INT; } /** diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java index 939002cb13..e427e50d63 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java @@ -79,6 +79,7 @@ public class ProcedureInfo implements Cloneable { @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="CN_IDIOM_NO_SUPER_CALL", justification="Intentional; calling super class clone doesn't make sense here.") + @Override public ProcedureInfo clone() { return new ProcedureInfo(procId, procName, procOwner, procState, parentId, nonceKey, exception, lastUpdate, startTime, result); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java index b4efaf8039..21c841c814 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/CellCodecWithTags.java @@ -80,6 +80,7 @@ public class CellCodecWithTags implements Codec { super(in); } + @Override protected Cell parseCell() throws IOException { byte[] row = readByteArray(this.in); byte[] family = readByteArray(in); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java index 07fd838201..6df9ec3ec4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java @@ -64,6 +64,7 @@ public class KeyValueCodec implements Codec { super(in); } + @Override protected Cell parseCell() throws IOException { return KeyValueUtil.iscreate(in, false); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java index 5d34a46f83..c241785e52 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodecWithTags.java @@ -70,6 +70,7 @@ public class KeyValueCodecWithTags implements Codec { super(in); } + @Override protected Cell parseCell() throws IOException { return KeyValueUtil.iscreate(in, true); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java index 4d526143dd..8c371a6d86 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/BoundedByteBufferPool.java @@ -60,7 +60,7 @@ public class BoundedByteBufferPool { volatile int runningAverage; // Scratch that keeps rough total size of pooled bytebuffers - private volatile int totalReservoirCapacity; + private AtomicLong totalReservoirCapacity = new AtomicLong(0); // For reporting private AtomicLong allocations = new AtomicLong(0); @@ -89,7 +89,7 @@ public class BoundedByteBufferPool { try { bb = this.buffers.poll(); if (bb != null) { - this.totalReservoirCapacity -= bb.capacity(); + 
this.totalReservoirCapacity.addAndGet(-bb.capacity()); } } finally { lock.unlock(); @@ -119,8 +119,8 @@ public class BoundedByteBufferPool { try { success = this.buffers.offer(bb); if (success) { - this.totalReservoirCapacity += bb.capacity(); - average = this.totalReservoirCapacity / this.buffers.size(); // size will never be 0. + average = (int) this.totalReservoirCapacity.addAndGet(bb.capacity()) / + this.buffers.size(); // size will never be 0. } } finally { lock.unlock(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java index d74a5d6283..dd7e300883 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java @@ -148,6 +148,7 @@ implements WritableComparable { return this.offset; } + @Override public void readFields(final DataInput in) throws IOException { this.length = in.readInt(); this.bytes = new byte[this.length]; @@ -155,6 +156,7 @@ implements WritableComparable { this.offset = 0; } + @Override public void write(final DataOutput out) throws IOException { out.writeInt(this.length); out.write(this.bytes, this.offset, this.length); @@ -175,6 +177,7 @@ implements WritableComparable { * @return Positive if left is bigger than right, 0 if they are equal, and * negative if left is smaller than right. */ + @Override public int compareTo(ImmutableBytesWritable that) { return WritableComparator.compareBytes( this.bytes, this.offset, this.length, diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java index 9697da334a..d056115a5d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/Compression.java @@ -106,7 +106,7 @@ public final class Compression { LZO("lzo") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec lzoCodec; - private transient Object lock = new Object(); + private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { @@ -133,7 +133,7 @@ public final class Compression { }, GZ("gz") { private volatile transient GzipCodec codec; - private transient Object lock = new Object(); + private final transient Object lock = new Object(); @Override DefaultCodec getCodec(Configuration conf) { @@ -185,7 +185,7 @@ public final class Compression { SNAPPY("snappy") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec snappyCodec; - private transient Object lock = new Object(); + private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { @@ -212,7 +212,7 @@ public final class Compression { LZ4("lz4") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec lz4Codec; - private transient Object lock = new Object(); + private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { @@ -239,7 +239,7 @@ public final class Compression { BZIP2("bzip2") { // Use base type to avoid compile-time dependencies. 
private volatile transient CompressionCodec bzipCodec; - private transient Object lock = new Object(); + private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { @@ -266,7 +266,7 @@ public final class Compression { ZSTD("zstd") { // Use base type to avoid compile-time dependencies. private volatile transient CompressionCodec zStandardCodec; - private transient Object lock = new Object(); + private final transient Object lock = new Object(); @Override CompressionCodec getCodec(Configuration conf) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java index d7535e58ce..7d6b15c8b4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java @@ -176,13 +176,10 @@ public enum DataBlockEncoding { protected static DataBlockEncoder createEncoder(String fullyQualifiedClassName){ try { - return (DataBlockEncoder)Class.forName(fullyQualifiedClassName).newInstance(); - } catch (InstantiationException e) { + return (DataBlockEncoder)Class.forName(fullyQualifiedClassName) + .getDeclaredConstructor().newInstance(); + } catch (Exception e) { throw new RuntimeException(e); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } catch (ClassNotFoundException e) { - throw new IllegalArgumentException(e); } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java index 61444784f2..f28100dfc1 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java @@ -206,7 +206,7 @@ public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder { private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell) throws IOException { - byte flag = 0; + int flag = 0; // Do not use more bits that can fit into a byte int kLength = KeyValueUtil.keyLength(cell); int vLength = cell.getValueLength(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java index a4fca2ca91..192c84d1bb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java @@ -255,7 +255,7 @@ public class EncodedDataBlock { } BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream(); baos.writeTo(stream); - this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf); + this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.ourBytes); } catch (IOException e) { throw new RuntimeException(String.format( "Bug in encoding part of algorithm %s. 
" + @@ -266,11 +266,11 @@ public class EncodedDataBlock { } private static class BufferGrabbingByteArrayOutputStream extends ByteArrayOutputStream { - private byte[] buf; + private byte[] ourBytes; @Override - public void write(byte[] b, int off, int len) { - this.buf = b; + public synchronized void write(byte[] b, int off, int len) { + this.ourBytes = b; } } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java index c14b542181..72b6a5c00e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java @@ -250,7 +250,7 @@ public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder { private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell) throws IOException { - byte flag = 0; + int flag = 0; // Do not use more bits than will fit into a byte int kLength = KeyValueUtil.keyLength(cell); int vLength = cell.getValueLength(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java index 8562cf06f8..78ae9a64f2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/util/LRUDictionary.java @@ -134,7 +134,7 @@ public class LRUDictionary implements Dictionary { } private void moveToHead(Node n) { - if (head == n) { + if (head.equals(n)) { // no-op -- it's already the head. return; } @@ -147,7 +147,7 @@ public class LRUDictionary implements Dictionary { if (n.next != null) { n.next.prev = n.prev; } else { - assert n == tail; + assert n.equals(tail); tail = n.prev; } // Node is now removed from the list. Re-add it at the head. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java index 7e6de4ad56..9eac12f7a6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/security/UserProvider.java @@ -98,11 +98,11 @@ public class UserProvider extends BaseConfigurable { } // Provide the reload function that uses the executor thread. 
- public ListenableFuture reload(final String k, - String[] oldValue) throws Exception { + @Override + public ListenableFuture reload(final String k, String[] oldValue) + throws Exception { return executor.submit(new Callable() { - @Override public String[] call() throws Exception { return getGroupStrings(k); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java index 2d9c398dad..4818efc1e9 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/SpanReceiverHost.java @@ -45,7 +45,7 @@ public class SpanReceiverHost { @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SE_BAD_FIELD") private static enum SingletonHolder { INSTANCE; - Object lock = new Object(); + final Object lock = new Object(); SpanReceiverHost host = null; // FindBugs: SE_BAD_FIELD } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java index 8de39aec7c..8ed2322619 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/types/CopyOnWriteArrayMap.java @@ -706,7 +706,7 @@ public class CopyOnWriteArrayMap extends AbstractMap } } - private final class ArrayKeyIterator implements Iterator { + private static final class ArrayKeyIterator implements Iterator { int index; private final ArrayHolder holder; @@ -732,7 +732,7 @@ public class CopyOnWriteArrayMap extends AbstractMap } } - private final class ArrayValueIterator implements Iterator { + private static final class ArrayValueIterator implements Iterator { int index; private final ArrayHolder holder; @@ -758,7 +758,7 @@ public class CopyOnWriteArrayMap extends AbstractMap } } - private final class ArrayEntryIterator implements Iterator> { + private static final class ArrayEntryIterator implements Iterator> { int index; private final ArrayHolder holder; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java index a68d12f9b5..137d1ca912 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java @@ -189,9 +189,9 @@ public abstract class AbstractByteRange implements ByteRange { public short getShort(int index) { int offset = this.offset + index; short n = 0; - n ^= bytes[offset] & 0xFF; - n <<= 8; - n ^= bytes[offset + 1] & 0xFF; + n = (short) ((n ^ bytes[offset]) & 0xFF); + n = (short) (n << 8); + n = (short) ((n ^ bytes[offset + 1]) & 0xFF); return n; } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java index 004313501f..00c05cd121 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Base64.java @@ -40,6 +40,7 @@ import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @@ -571,7 +572,7 @@ public class Base64 { return new 
String(baos.toByteArray(), PREFERRED_ENCODING); } catch (UnsupportedEncodingException uue) { - return new String(baos.toByteArray()); + return new String(baos.toByteArray(), StandardCharsets.UTF_8); } catch (IOException e) { LOG.error("error encoding object", e); @@ -696,7 +697,7 @@ public class Base64 { return new String(baos.toByteArray(), PREFERRED_ENCODING); } catch (UnsupportedEncodingException uue) { - return new String(baos.toByteArray()); + return new String(baos.toByteArray(), StandardCharsets.UTF_8); } catch (IOException e) { LOG.error("error encoding byte array", e); @@ -753,7 +754,7 @@ public class Base64 { return new String(outBuff, 0, e, PREFERRED_ENCODING); } catch (UnsupportedEncodingException uue) { - return new String(outBuff, 0, e); + return new String(outBuff, 0, e, StandardCharsets.UTF_8); } } // end encodeBytes @@ -928,7 +929,7 @@ public class Base64 { bytes = s.getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException uee) { - bytes = s.getBytes(); + bytes = s.getBytes(StandardCharsets.UTF_8); } // end catch // Decode diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java index d3414dd599..d9f8dcfd1d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferArray.java @@ -167,7 +167,7 @@ public final class ByteBufferArray { int endBuffer = (int) (end / bufferSize), endOffset = (int) (end % bufferSize); assert array.length >= len + arrayOffset; assert startBuffer >= 0 && startBuffer < bufferCount; - assert endBuffer >= 0 && endBuffer < bufferCount + assert (endBuffer >= 0 && endBuffer < bufferCount) || (endBuffer == bufferCount && endOffset == 0); if (startBuffer >= locks.length || startBuffer < 0) { String msg = "Failed multiple, start=" + start + ",startBuffer=" diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java index cf59f694b1..b5b1d96a4d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java @@ -190,27 +190,27 @@ public final class ByteBufferUtils { return 8; } - if (value < (1l << 4 * 8)) { + if (value < (1l << (4 * 8))) { // no more than 4 bytes - if (value < (1l << 2 * 8)) { - if (value < (1l << 1 * 8)) { + if (value < (1l << (2 * 8))) { + if (value < (1l << (1 * 8))) { return 1; } return 2; } - if (value < (1l << 3 * 8)) { + if (value < (1l << (3 * 8))) { return 3; } return 4; } // more than 4 bytes - if (value < (1l << 6 * 8)) { - if (value < (1l << 5 * 8)) { + if (value < (1l << (6 * 8))) { + if (value < (1l << (5 * 8))) { return 5; } return 6; } - if (value < (1l << 7 * 8)) { + if (value < (1l << (7 * 8))) { return 7; } return 8; @@ -226,13 +226,13 @@ public final class ByteBufferUtils { return 4; } - if (value < (1 << 2 * 8)) { - if (value < (1 << 1 * 8)) { + if (value < (1 << (2 * 8))) { + if (value < (1 << (1 * 8))) { return 1; } return 2; } - if (value <= (1 << 3 * 8)) { + if (value <= (1 << (3 * 8))) { return 3; } return 4; diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java index 820b81e618..93dcafe7a3 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java +++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java @@ -1122,9 +1122,9 @@ public class Bytes implements Comparable { return toShortUnsafe(bytes, offset); } else { short n = 0; - n ^= bytes[offset] & 0xFF; - n <<= 8; - n ^= bytes[offset+1] & 0xFF; + n = (short) ((n ^ bytes[offset]) & 0xFF); + n = (short) (n << 8); + n = (short) ((n ^ bytes[offset+1]) & 0xFF); return n; } } @@ -1565,8 +1565,8 @@ public class Bytes implements Comparable { final int stride = 8; final int minLength = Math.min(length1, length2); int strideLimit = minLength & ~(stride - 1); - final long offset1Adj = offset1 + BYTE_ARRAY_BASE_OFFSET; - final long offset2Adj = offset2 + BYTE_ARRAY_BASE_OFFSET; + final long offset1Adj = (long) offset1 + BYTE_ARRAY_BASE_OFFSET; + final long offset2Adj = (long) offset2 + BYTE_ARRAY_BASE_OFFSET; int i; /* diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java index 262864ab4f..35acbde092 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassLoaderBase.java @@ -62,6 +62,7 @@ public class ClassLoaderBase extends URLClassLoader { /** * Returns the lock object for class loading operations. */ + @Override protected Object getClassLoadingLock(String className) { Object lock = parallelLockMap.get(className); if (lock != null) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java index 51e169441c..edb5b2d935 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ClassSize.java @@ -344,7 +344,7 @@ public class ClassSize { * @return the size estimate, in bytes */ private static long estimateBaseFromCoefficients(int [] coeff, boolean debug) { - long prealign_size = OBJECT + coeff[0] + coeff[2] * REFERENCE; + long prealign_size = (long) OBJECT + coeff[0] + coeff[2] * REFERENCE; // Round up to a multiple of 8 long size = align(prealign_size) + align(coeff[1] * ARRAY); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java index 4457fe00b3..439b32128a 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/HasThread.java @@ -44,7 +44,8 @@ public abstract class HasThread implements Runnable { public Thread getThread() { return thread; } - + + @Override public abstract void run(); //// Begin delegation to Thread diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java index 3625a12566..9e9fc6fab7 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JVM.java @@ -26,6 +26,7 @@ import java.lang.management.ManagementFactory; import java.lang.management.OperatingSystemMXBean; import java.lang.management.RuntimeMXBean; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -160,7 +161,7 @@ public class JVM { new String[]{"bash", "-c", "ls /proc/" + pidhost[0] + "/fdinfo | wc -l"}); inputStream = p.getInputStream(); - 
inputStreamReader = new InputStreamReader(inputStream); + inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); bufferedReader = new BufferedReader(inputStreamReader); String openFileDesCount; if ((openFileDesCount = bufferedReader.readLine()) != null) { @@ -236,7 +237,7 @@ public class JVM { int count = 0; Process p = Runtime.getRuntime().exec("ps -e"); inputStream = p.getInputStream(); - inputStreamReader = new InputStreamReader(inputStream); + inputStreamReader = new InputStreamReader(inputStream, StandardCharsets.UTF_8); bufferedReader = new BufferedReader(inputStreamReader); while (bufferedReader.readLine() != null) { count++; @@ -288,7 +289,7 @@ public class JVM { //using linux bash commands to retrieve info Process p = Runtime.getRuntime().exec(new String[]{"bash", "-c", "ulimit -n"}); in = p.getInputStream(); - output = new BufferedReader(new InputStreamReader(in)); + output = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); String maxFileDesCount; if ((maxFileDesCount = output.readLine()) != null) { return Long.parseLong(maxFileDesCount); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java index 9bdb829b94..a969949500 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/LongAdder.java @@ -67,6 +67,7 @@ public class LongAdder extends Striped64 implements Serializable { /** * Version of plus for use in retryUpdate */ + @Override final long fn(long v, long x) { return v + x; } /** @@ -171,6 +172,7 @@ public class LongAdder extends Striped64 implements Serializable { * Returns the String representation of the {@link #sum}. * @return the String representation of the {@link #sum} */ + @Override public String toString() { return Long.toString(sum()); } @@ -180,6 +182,7 @@ public class LongAdder extends Striped64 implements Serializable { * * @return the sum */ + @Override public long longValue() { return sum(); } @@ -188,6 +191,7 @@ public class LongAdder extends Striped64 implements Serializable { * Returns the {@link #sum} as an {@code int} after a narrowing * primitive conversion. */ + @Override public int intValue() { return (int)sum(); } @@ -196,6 +200,7 @@ public class LongAdder extends Striped64 implements Serializable { * Returns the {@link #sum} as a {@code float} * after a widening primitive conversion. */ + @Override public float floatValue() { return (float)sum(); } @@ -204,6 +209,7 @@ public class LongAdder extends Striped64 implements Serializable { * Returns the {@link #sum} as a {@code double} after a widening * primitive conversion. 
*/ + @Override public double doubleValue() { return (double)sum(); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index 9a40aeef13..03ff74575d 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -505,17 +505,17 @@ public class OrderedBytes { x = src.get(); a1 = ord.apply(x) & 0xff; if (-1 == unsignedCmp(a0, 249)) { - return (a0 - 241) * 256 + a1 + 240; + return (a0 - 241L) * 256 + a1 + 240; } x = src.get(); a2 = ord.apply(x) & 0xff; if (a0 == 249) { - return 2288 + 256 * a1 + a2; + return 2288L + 256 * a1 + a2; } x = src.get(); a3 = ord.apply(x) & 0xff; if (a0 == 250) { - return (a1 << 16) | (a2 << 8) | a3; + return ((long) a1 << 16L) | (a2 << 8) | a3; } x = src.get(); a4 = ord.apply(x) & 0xff; @@ -665,7 +665,8 @@ public class OrderedBytes { dst.put((byte) ((2 * d + 1) & 0xff)); abs = abs.subtract(BigDecimal.valueOf(d)); } - a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x + // terminal digit should be 2x + a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe); if (isNeg) { // negative values encoded as ~M DESCENDING.apply(a, offset + startM, dst.getPosition() - startM); @@ -749,8 +750,8 @@ public class OrderedBytes { dst.put((byte) (2 * d + 1)); abs = abs.subtract(BigDecimal.valueOf(d)); } - - a[offset + dst.getPosition() - 1] &= 0xfe; // terminal digit should be 2x + // terminal digit should be 2x + a[offset + dst.getPosition() - 1] = (byte) (a[offset + dst.getPosition() - 1] & 0xfe); if (isNeg) { // negative values encoded as ~M DESCENDING.apply(a, offset + startM, dst.getPosition() - startM); @@ -1065,7 +1066,8 @@ public class OrderedBytes { if (s > 1) { dst.put((byte) (0x7f & t)); } else { - dst.getBytes()[offset + dst.getPosition() - 1] &= 0x7f; + dst.getBytes()[offset + dst.getPosition() - 1] = + (byte) (dst.getBytes()[offset + dst.getPosition() - 1] & 0x7f); } } ord.apply(dst.getBytes(), offset + start, dst.getPosition() - start); @@ -1118,7 +1120,7 @@ public class OrderedBytes { ret.put((byte) (t | ((ord.apply(a[offset + i]) & 0x7f) >>> s))); } if (i == end) break; - t = (byte) ((ord.apply(a[offset + i]) << 8 - s) & 0xff); + t = (byte) ((ord.apply(a[offset + i]) << (8 - s)) & 0xff); s = s == 1 ? 
7 : s - 1; } src.setPosition(end); @@ -1374,7 +1376,7 @@ public class OrderedBytes { public static int encodeFloat32(PositionedByteRange dst, float val, Order ord) { final int offset = dst.getOffset(), start = dst.getPosition(); int i = Float.floatToIntBits(val); - i ^= ((i >> Integer.SIZE - 1) | Integer.MIN_VALUE); + i ^= ((i >> (Integer.SIZE - 1)) | Integer.MIN_VALUE); dst.put(FIXED_FLOAT32) .put((byte) (i >> 24)) .put((byte) (i >> 16)) @@ -1396,7 +1398,7 @@ public class OrderedBytes { for (int i = 1; i < 4; i++) { val = (val << 8) + (ord.apply(src.get()) & 0xff); } - val ^= (~val >> Integer.SIZE - 1) | Integer.MIN_VALUE; + val ^= (~val >> (Integer.SIZE - 1)) | Integer.MIN_VALUE; return Float.intBitsToFloat(val); } @@ -1468,7 +1470,7 @@ public class OrderedBytes { public static int encodeFloat64(PositionedByteRange dst, double val, Order ord) { final int offset = dst.getOffset(), start = dst.getPosition(); long lng = Double.doubleToLongBits(val); - lng ^= ((lng >> Long.SIZE - 1) | Long.MIN_VALUE); + lng ^= ((lng >> (Long.SIZE - 1)) | Long.MIN_VALUE); dst.put(FIXED_FLOAT64) .put((byte) (lng >> 56)) .put((byte) (lng >> 48)) @@ -1494,7 +1496,7 @@ public class OrderedBytes { for (int i = 1; i < 8; i++) { val = (val << 8) + (ord.apply(src.get()) & 0xff); } - val ^= (~val >> Long.SIZE - 1) | Long.MIN_VALUE; + val ^= (~val >> (Long.SIZE - 1)) | Long.MIN_VALUE; return Double.longBitsToDouble(val); } diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java index 36f2fce59f..02b9b3f4fe 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Striped64.java @@ -338,6 +338,7 @@ abstract class Striped64 extends Number { try { return java.security.AccessController.doPrivileged (new java.security.PrivilegedExceptionAction() { + @Override public sun.misc.Unsafe run() throws Exception { Class k = sun.misc.Unsafe.class; for (java.lang.reflect.Field f : k.getDeclaredFields()) { diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java index 5c2bc1281d..fa02b25dcb 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Threads.java @@ -18,11 +18,13 @@ */ package org.apache.hadoop.hbase.util; +import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.Thread.UncaughtExceptionHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.nio.charset.StandardCharsets; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; @@ -298,7 +300,8 @@ public class Threads { if (PRINT_THREAD_INFO_METHOD_WITH_PRINTSTREAM) { PRINT_THREAD_INFO_METHOD.invoke(null, stream, title); } else { - PRINT_THREAD_INFO_METHOD.invoke(null, new PrintWriter(stream), title); + PRINT_THREAD_INFO_METHOD.invoke(null, + new PrintWriter(new OutputStreamWriter(stream, StandardCharsets.UTF_8)), title); } } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { throw new RuntimeException(e.getCause()); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java index 
1438ab7c55..059ed0ef8b 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Triple.java @@ -40,6 +40,7 @@ public class Triple { return new Triple(first, second, third); } + @Override public int hashCode() { int hashFirst = (first != null ? first.hashCode() : 0); int hashSecond = (second != null ? second.hashCode() : 0); @@ -48,6 +49,7 @@ public class Triple { return (hashFirst >> 1) ^ hashSecond ^ (hashThird << 1); } + @Override public boolean equals(Object obj) { if (!(obj instanceof Triple)) { return false; @@ -65,6 +67,7 @@ public class Triple { return true; } + @Override public String toString() { return "(" + first + ", " + second + "," + third + " )"; } @@ -93,6 +96,3 @@ public class Triple { this.third = third; } } - - - diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java index 97bce75ac5..9ebff7d8b4 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/UnsafeAccess.java @@ -89,7 +89,7 @@ public final class UnsafeAccess { destAddress = destAddress + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); destBase = dest.array(); } - long srcAddress = srcOffset + BYTE_ARRAY_BASE_OFFSET; + long srcAddress = (long) srcOffset + BYTE_ARRAY_BASE_OFFSET; unsafeCopy(src, srcAddress, destBase, destAddress, length); } @@ -123,7 +123,7 @@ public final class UnsafeAccess { srcAddress = srcAddress + BYTE_ARRAY_BASE_OFFSET + src.arrayOffset(); srcBase = src.array(); } - long destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET; + long destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET; unsafeCopy(srcBase, srcAddress, dest, destAddress, length); } @@ -144,13 +144,13 @@ public final class UnsafeAccess { if (src.isDirect()) { srcAddress = srcOffset + ((DirectBuffer) src).address(); } else { - srcAddress = srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET; + srcAddress = (long) srcOffset + src.arrayOffset() + BYTE_ARRAY_BASE_OFFSET; srcBase = src.array(); } if (dest.isDirect()) { destAddress = destOffset + ((DirectBuffer) dest).address(); } else { - destAddress = destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); + destAddress = (long) destOffset + BYTE_ARRAY_BASE_OFFSET + dest.arrayOffset(); destBase = dest.array(); } unsafeCopy(srcBase, srcAddress, destBase, destAddress, length); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java index 2e0436ca01..a28869279f 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/ClassFinder.java @@ -287,7 +287,7 @@ public class ClassFinder { return null; } - private class FileFilterWithName implements FileFilter { + private static class FileFilterWithName implements FileFilter { private FileNameFilter nameFilter; public FileFilterWithName(FileNameFilter nameFilter) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java index b7361bfcf8..2cdab5d380 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java @@ -203,6 +203,7 @@ public class 
HBaseCommonTestingUtility { LOG.warn("Failed to delete " + dir.getAbsolutePath(), ex); } } while (ntries < 30); - return ntries < 30; + + return false; } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java index 435f8bb443..7080078559 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellUtil.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase; import static org.junit.Assert.*; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.NavigableMap; @@ -52,7 +53,7 @@ public class TestCellUtil { /** * CellScanner used in test. */ - private class TestCellScanner implements CellScanner { + private static class TestCellScanner implements CellScanner { private int count = 0; private Cell current = null; private final int cellsCount; @@ -80,7 +81,7 @@ public class TestCellUtil { /** * Cell used in test. Has row only. */ - private class TestCell implements Cell { + private static class TestCell implements Cell { private final byte [] row; TestCell(final int i) { @@ -331,7 +332,8 @@ public class TestCellUtil { @Test public void testFindCommonPrefixInFlatKey() { // The whole key matching case - KeyValue kv1 = new KeyValue("r1".getBytes(), "f1".getBytes(), "q1".getBytes(), null); + KeyValue kv1 = new KeyValue("r1".getBytes(StandardCharsets.UTF_8), + "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(kv1.getKeyLength(), CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, true)); Assert.assertEquals(kv1.getKeyLength(), @@ -339,30 +341,35 @@ public class TestCellUtil { Assert.assertEquals(kv1.getKeyLength() - KeyValue.TIMESTAMP_TYPE_SIZE, CellUtil.findCommonPrefixInFlatKey(kv1, kv1, true, false)); // The rk length itself mismatch - KeyValue kv2 = new KeyValue("r12".getBytes(), "f1".getBytes(), "q1".getBytes(), null); + KeyValue kv2 = new KeyValue("r12".getBytes(StandardCharsets.UTF_8), + "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(1, CellUtil.findCommonPrefixInFlatKey(kv1, kv2, true, true)); // part of rk is same - KeyValue kv3 = new KeyValue("r14".getBytes(), "f1".getBytes(), "q1".getBytes(), null); - Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes().length, + KeyValue kv3 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8), + "f1".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); + Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + "r1".getBytes(StandardCharsets.UTF_8).length, CellUtil.findCommonPrefixInFlatKey(kv2, kv3, true, true)); // entire rk is same but different cf name - KeyValue kv4 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q1".getBytes(), null); + KeyValue kv4 = new KeyValue("r14".getBytes(StandardCharsets.UTF_8), + "f2".getBytes(StandardCharsets.UTF_8), "q1".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE - + "f".getBytes().length, CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true)); + + "f".getBytes(StandardCharsets.UTF_8).length, + CellUtil.findCommonPrefixInFlatKey(kv3, kv4, false, true)); // rk and family are same and part of qualifier - KeyValue kv5 = new KeyValue("r14".getBytes(), "f2".getBytes(), "q123".getBytes(), null); + KeyValue kv5 = new 
KeyValue("r14".getBytes(StandardCharsets.UTF_8), + "f2".getBytes(StandardCharsets.UTF_8), "q123".getBytes(StandardCharsets.UTF_8), null); Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv3.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE + kv4.getFamilyLength() + kv4.getQualifierLength(), CellUtil.findCommonPrefixInFlatKey(kv4, kv5, true, true)); // rk, cf and q are same. ts differs - KeyValue kv6 = new KeyValue("rk".getBytes(), 1234L); - KeyValue kv7 = new KeyValue("rk".getBytes(), 1235L); + KeyValue kv6 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L); + KeyValue kv7 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1235L); // only last byte out of 8 ts bytes in ts part differs Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE + kv6.getFamilyLength() + kv6.getQualifierLength() + 7, CellUtil.findCommonPrefixInFlatKey(kv6, kv7, true, true)); // rk, cf, q and ts are same. Only type differs - KeyValue kv8 = new KeyValue("rk".getBytes(), 1234L, Type.Delete); + KeyValue kv8 = new KeyValue("rk".getBytes(StandardCharsets.UTF_8), 1234L, Type.Delete); Assert.assertEquals(KeyValue.ROW_LENGTH_SIZE + kv6.getRowLength() + KeyValue.FAMILY_LENGTH_SIZE + kv6.getFamilyLength() + kv6.getQualifierLength() + KeyValue.TIMESTAMP_SIZE, CellUtil.findCommonPrefixInFlatKey(kv6, kv8, true, true)); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java index e5546f6cb1..abca0d7425 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestChoreService.java @@ -297,10 +297,10 @@ public class TestChoreService { } }; - assertEquals("Name construction failed", chore1.getName(), NAME); - assertEquals("Period construction failed", chore1.getPeriod(), PERIOD); - assertEquals("Initial Delay construction failed", chore1.getInitialDelay(), VALID_DELAY); - assertEquals("TimeUnit construction failed", chore1.getTimeUnit(), UNIT); + assertEquals("Name construction failed", NAME, chore1.getName()); + assertEquals("Period construction failed", PERIOD, chore1.getPeriod()); + assertEquals("Initial Delay construction failed", VALID_DELAY, chore1.getInitialDelay()); + assertEquals("TimeUnit construction failed", UNIT, chore1.getTimeUnit()); ScheduledChore invalidDelayChore = new ScheduledChore(NAME, new SampleStopper(), PERIOD, INVALID_DELAY, UNIT) { @@ -475,7 +475,7 @@ public class TestChoreService { Thread.sleep(chorePeriod * 10); assertEquals("Chores are missing their start time. Should expand core pool size", service.getNumberOfScheduledChores(), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 5); + assertEquals(5, service.getNumberOfChoresMissingStartTime()); // Now we begin to cancel the chores that caused an increase in the core thread pool of the // ChoreService. These cancellations should cause a decrease in the core thread pool. 
@@ -483,31 +483,31 @@ public class TestChoreService { Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 4); + assertEquals(4, service.getNumberOfChoresMissingStartTime()); slowChore4.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 3); + assertEquals(3, service.getNumberOfChoresMissingStartTime()); slowChore3.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 2); + assertEquals(2, service.getNumberOfChoresMissingStartTime()); slowChore2.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 1); + assertEquals(1, service.getNumberOfChoresMissingStartTime()); slowChore1.cancel(); Thread.sleep(chorePeriod * 10); assertEquals(Math.max(ChoreService.MIN_CORE_POOL_SIZE, service.getNumberOfScheduledChores()), service.getCorePoolSize()); - assertEquals(service.getNumberOfChoresMissingStartTime(), 0); + assertEquals(0, service.getNumberOfChoresMissingStartTime()); } finally { shutdownService(service); } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java index 2a8d1a29b0..1aa052b323 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestHBaseConfiguration.java @@ -191,10 +191,9 @@ public class TestHBaseConfiguration { } // Instantiate Hadoop CredentialProviderFactory try { - hadoopCredProviderFactory = hadoopCredProviderFactoryClz.newInstance(); - } catch (InstantiationException e) { - return false; - } catch (IllegalAccessException e) { + hadoopCredProviderFactory = + hadoopCredProviderFactoryClz.getDeclaredConstructor().newInstance(); + } catch (Exception e) { return false; } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java index 3baf729ded..7d3aa0e87e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestKeyValue.java @@ -640,7 +640,7 @@ public class TestKeyValue extends TestCase { assertTrue(kvA2.equals(deSerKV2)); } - private class MockKeyValue implements Cell { + private static class MockKeyValue implements Cell { private final KeyValue kv; public MockKeyValue(KeyValue kv) { diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java index 6c18dc0689..9769df0b4c 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/codec/TestKeyValueCodec.java @@ -67,7 +67,7 @@ public class TestKeyValueCodec { Codec.Encoder encoder = kvc.getEncoder(dos); final KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), 
Bytes.toBytes("q"), Bytes.toBytes("v")); - final long length = kv.getLength() + Bytes.SIZEOF_INT; + final long length = (long) kv.getLength() + Bytes.SIZEOF_INT; encoder.write(kv); encoder.flush(); dos.close(); @@ -97,7 +97,7 @@ public class TestKeyValueCodec { new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("2"), Bytes.toBytes("2")); final KeyValue kv3 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("3"), Bytes.toBytes("3")); - final long length = kv1.getLength() + Bytes.SIZEOF_INT; + final long length = (long) kv1.getLength() + Bytes.SIZEOF_INT; encoder.write(kv1); encoder.write(kv2); encoder.write(kv3); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java index 95f8ba1f35..9b444f5718 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestCipherProvider.java @@ -131,8 +131,8 @@ public class TestCipherProvider { Cipher a = Encryption.getCipher(conf, "TEST"); assertNotNull(a); assertTrue(a.getProvider() instanceof MyCipherProvider); - assertEquals(a.getName(), "TEST"); - assertEquals(a.getKeyLength(), 0); + assertEquals("TEST", a.getName()); + assertEquals(0, a.getKeyLength()); } @Test @@ -147,7 +147,7 @@ public class TestCipherProvider { assertNotNull(a); assertTrue(a.getProvider() instanceof DefaultCipherProvider); assertEquals(a.getName(), algorithm); - assertEquals(a.getKeyLength(), AES.KEY_LENGTH); + assertEquals(AES.KEY_LENGTH, a.getKeyLength()); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java index 9c98272ad8..fcb0b51384 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyProvider.java @@ -44,9 +44,9 @@ public class TestKeyProvider { Key key = provider.getKey("foo"); assertNotNull("Test provider did not return a key as expected", key); - assertEquals("Test provider did not create a key for AES", key.getAlgorithm(), "AES"); - assertEquals("Test provider did not create a key of adequate length", - key.getEncoded().length, AES.KEY_LENGTH); + assertEquals("Test provider did not create a key for AES", "AES", key.getAlgorithm()); + assertEquals("Test provider did not create a key of adequate length", AES.KEY_LENGTH, + key.getEncoded().length); } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java index 9e381033a2..bc6edb81bd 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/TestKeyStoreKeyProvider.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertNotNull; import java.io.File; import java.io.FileOutputStream; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.security.Key; import java.security.KeyStore; import java.security.MessageDigest; @@ -51,7 +52,7 @@ public class TestKeyStoreKeyProvider { @BeforeClass public static void setUp() throws Exception { - KEY = MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes()); + KEY = 
MessageDigest.getInstance("SHA-256").digest(ALIAS.getBytes(StandardCharsets.UTF_8)); // Create a JKECS store containing a test secret key KeyStore store = KeyStore.getInstance("JCEKS"); store.load(null, PASSWORD.toCharArray()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java index 65260ea6da..55cad54ff5 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/crypto/aes/TestAES.java @@ -53,8 +53,8 @@ public class TestAES { public void testAESAlgorithm() throws Exception { Configuration conf = HBaseConfiguration.create(); Cipher aes = Encryption.getCipher(conf, "AES"); - assertEquals(aes.getKeyLength(), AES.KEY_LENGTH); - assertEquals(aes.getIvLength(), AES.IV_LENGTH); + assertEquals(AES.KEY_LENGTH, aes.getKeyLength()); + assertEquals(AES.IV_LENGTH, aes.getIvLength()); Encryptor e = aes.getEncryptor(); e.setKey(new SecretKeySpec(Bytes.fromHex("2b7e151628aed2a6abf7158809cf4f3c"), "AES")); e.setIv(Bytes.fromHex("f0f1f2f3f4f5f6f7f8f9fafbfcfdfeff")); @@ -89,8 +89,7 @@ public class TestAES { DefaultCipherProvider.getInstance().setConf(conf); AES aes = new AES(DefaultCipherProvider.getInstance()); - assertEquals("AES did not find alternate RNG", aes.getRNG().getAlgorithm(), - "TestRNG"); + assertEquals("AES did not find alternate RNG", "TestRNG", aes.getRNG().getAlgorithm()); } static class TestProvider extends Provider { @@ -98,6 +97,7 @@ public class TestAES { public TestProvider() { super("TEST", 1.0, "Test provider"); AccessController.doPrivileged(new PrivilegedAction() { + @Override public Object run() { put("SecureRandom.TestRNG", TestAES.class.getName() + "$TestRNG"); return null; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java index 6d16ec20c6..f8d0c220e8 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/io/util/TestLRUDictionary.java @@ -78,7 +78,7 @@ public class TestLRUDictionary { rand.nextBytes(testBytes); // Verify that our randomly generated array doesn't exist in the dictionary - assertEquals(testee.findEntry(testBytes, 0, testBytes.length), -1); + assertEquals(-1, testee.findEntry(testBytes, 0, testBytes.length)); // now since we looked up an entry, we should have added it to the // dictionary, so it isn't empty diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java index 71b4cd14dd..f02087c624 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/types/TestStruct.java @@ -21,6 +21,7 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import java.lang.reflect.Constructor; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; @@ -70,10 +71,15 @@ public class TestStruct { }; Object[][] pojo2Args = { - new Object[] { new byte[0], "it".getBytes(), "was", "the".getBytes() }, - new Object[] { "best".getBytes(), new byte[0], "of", "times,".getBytes() }, - new Object[] { "it".getBytes(), "was".getBytes(), "", "the".getBytes() }, - new 
Object[] { "worst".getBytes(), "of".getBytes(), "times,", new byte[0] }, + new Object[] { new byte[0], "it".getBytes(StandardCharsets.UTF_8), "was", + "the".getBytes(StandardCharsets.UTF_8) }, + new Object[] { "best".getBytes(StandardCharsets.UTF_8), new byte[0], "of", + "times,".getBytes(StandardCharsets.UTF_8) }, + new Object[] { "it".getBytes(StandardCharsets.UTF_8), + "was".getBytes(StandardCharsets.UTF_8), "", + "the".getBytes(StandardCharsets.UTF_8) }, + new Object[] { "worst".getBytes(StandardCharsets.UTF_8), + "of".getBytes(StandardCharsets.UTF_8), "times,", new byte[0] }, new Object[] { new byte[0], new byte[0], "", new byte[0] }, }; @@ -127,19 +133,54 @@ public class TestStruct { @Override public int compareTo(Pojo1 o) { int cmp = stringFieldAsc.compareTo(o.stringFieldAsc); - if (cmp != 0) return cmp; + if (cmp != 0) { + return cmp; + } cmp = Integer.valueOf(intFieldAsc).compareTo(Integer.valueOf(o.intFieldAsc)); - if (cmp != 0) return cmp; + if (cmp != 0) { + return cmp; + } return Double.compare(doubleFieldAsc, o.doubleFieldAsc); } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (null == o) return false; - if (!(o instanceof Pojo1)) return false; - Pojo1 that = (Pojo1) o; - return 0 == this.compareTo(that); + public int hashCode() { + final int prime = 31; + int result = 1; + long temp; + temp = Double.doubleToLongBits(doubleFieldAsc); + result = prime * result + (int) (temp ^ (temp >>> 32)); + result = prime * result + intFieldAsc; + result = prime * result + ((stringFieldAsc == null) ? 0 : stringFieldAsc.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Pojo1 other = (Pojo1) obj; + if (Double.doubleToLongBits(doubleFieldAsc) != Double.doubleToLongBits(other.doubleFieldAsc)) { + return false; + } + if (intFieldAsc != other.intFieldAsc) { + return false; + } + if (stringFieldAsc == null) { + if (other.stringFieldAsc != null) { + return false; + } + } else if (!stringFieldAsc.equals(other.stringFieldAsc)) { + return false; + } + return true; } } @@ -178,24 +219,69 @@ public class TestStruct { @Override public int compareTo(Pojo2 o) { int cmp = NULL_SAFE_BYTES_COMPARATOR.compare(byteField1Asc, o.byteField1Asc); - if (cmp != 0) return cmp; + if (cmp != 0) { + return cmp; + } cmp = -NULL_SAFE_BYTES_COMPARATOR.compare(byteField2Dsc, o.byteField2Dsc); - if (cmp != 0) return cmp; - if (stringFieldDsc == o.stringFieldDsc) cmp = 0; - else if (null == stringFieldDsc) cmp = 1; - else if (null == o.stringFieldDsc) cmp = -1; + if (cmp != 0) { + return cmp; + } + if (null == stringFieldDsc) { + cmp = 1; + } + else if (null == o.stringFieldDsc) { + cmp = -1; + } + else if (stringFieldDsc.equals(o.stringFieldDsc)) { + cmp = 0; + } else cmp = -stringFieldDsc.compareTo(o.stringFieldDsc); - if (cmp != 0) return cmp; + if (cmp != 0) { + return cmp; + } return -NULL_SAFE_BYTES_COMPARATOR.compare(byteField3Dsc, o.byteField3Dsc); } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (null == o) return false; - if (!(o instanceof Pojo2)) return false; - Pojo2 that = (Pojo2) o; - return 0 == this.compareTo(that); + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + Arrays.hashCode(byteField1Asc); + result = prime * result + Arrays.hashCode(byteField2Dsc); + result = prime * result + Arrays.hashCode(byteField3Dsc); + result = 
prime * result + ((stringFieldDsc == null) ? 0 : stringFieldDsc.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Pojo2 other = (Pojo2) obj; + if (!Arrays.equals(byteField1Asc, other.byteField1Asc)) { + return false; + } + if (!Arrays.equals(byteField2Dsc, other.byteField2Dsc)) { + return false; + } + if (!Arrays.equals(byteField3Dsc, other.byteField3Dsc)) { + return false; + } + if (stringFieldDsc == null) { + if (other.stringFieldDsc != null) { + return false; + } + } else if (!stringFieldDsc.equals(other.stringFieldDsc)) { + return false; + } + return true; } } diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java index ef213ee4f9..79d2f3f357 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java @@ -24,6 +24,8 @@ import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.FileWriter; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.ArrayList; import java.util.List; import java.util.jar.JarEntry; @@ -127,7 +129,7 @@ public class ClassLoaderTestHelper { File srcDirPath = new File(srcDir.toString()); srcDirPath.mkdirs(); File sourceCodeFile = new File(srcDir.toString(), className + ".java"); - BufferedWriter bw = new BufferedWriter(new FileWriter(sourceCodeFile)); + BufferedWriter bw = Files.newBufferedWriter(sourceCodeFile.toPath(), StandardCharsets.UTF_8); bw.write(javaCode); bw.close(); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java index 3ec0afbcd6..7771f8773e 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java @@ -157,9 +157,9 @@ public class TestBytes extends TestCase { float [] floats = {-1f, 123.123f, Float.MAX_VALUE}; for (int i = 0; i < floats.length; i++) { byte [] b = Bytes.toBytes(floats[i]); - assertEquals(floats[i], Bytes.toFloat(b)); + assertEquals(floats[i], Bytes.toFloat(b), 0.0f); byte [] b2 = bytesWithOffset(b); - assertEquals(floats[i], Bytes.toFloat(b2, 1)); + assertEquals(floats[i], Bytes.toFloat(b2, 1), 0.0f); } } @@ -167,9 +167,9 @@ public class TestBytes extends TestCase { double [] doubles = {Double.MIN_VALUE, Double.MAX_VALUE}; for (int i = 0; i < doubles.length; i++) { byte [] b = Bytes.toBytes(doubles[i]); - assertEquals(doubles[i], Bytes.toDouble(b)); + assertEquals(doubles[i], Bytes.toDouble(b), 0.0); byte [] b2 = bytesWithOffset(b); - assertEquals(doubles[i], Bytes.toDouble(b2, 1)); + assertEquals(doubles[i], Bytes.toDouble(b2, 1), 0.0); } } @@ -431,13 +431,13 @@ public class TestBytes extends TestCase { public void testUnsignedBinarySearch(){ byte[] bytes = new byte[]{0,5,123,127,-128,-100,-1}; - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)5), 1); - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)127), 3); - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-128), 4); - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, 
(byte)-100), 5); - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-1), 6); - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)2), -1-1); - Assert.assertEquals(Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-5), -6-1); + Assert.assertEquals(1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)5)); + Assert.assertEquals(3, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)127)); + Assert.assertEquals(4, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-128)); + Assert.assertEquals(5, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-100)); + Assert.assertEquals(6, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-1)); + Assert.assertEquals(-1-1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)2)); + Assert.assertEquals(-6-1, Bytes.unsignedBinarySearch(bytes, 0, bytes.length, (byte)-5)); } public void testUnsignedIncrement(){ diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java index 9b4ddb55be..5b59f0ae37 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestConcatenatedLists.java @@ -115,6 +115,8 @@ public class TestConcatenatedLists { } @SuppressWarnings("ModifyingCollectionWithItself") + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="DMI_VACUOUS_SELF_COLLECTION_CALL", + justification="Intended vacuous containsAll call on 'c'") private void verify(ConcatenatedLists c, int last) { assertEquals((last == -1), c.isEmpty()); assertEquals(last + 1, c.size()); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java index e1048da79b..dedd269f23 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorClassLoader.java @@ -86,8 +86,11 @@ public class TestCoprocessorClassLoader { private void checkingLibJarName(String jarName, String libPrefix) throws Exception { File tmpFolder = new File(ClassLoaderTestHelper.localDirPath(conf), "tmp"); if (tmpFolder.exists()) { // Clean up the tmp folder - for (File f: tmpFolder.listFiles()) { - f.delete(); + File[] files = tmpFolder.listFiles(); + if (files != null) { + for (File f: files) { + f.delete(); + } } } String className = "CheckingLibJarName"; @@ -101,10 +104,13 @@ public class TestCoprocessorClassLoader { ClassLoader classLoader = CoprocessorClassLoader.getClassLoader(path, parent, "112", conf); assertNotNull("Classloader should be created", classLoader); String fileToLookFor = "." 
+ className + ".jar"; - for (String f: tmpFolder.list()) { - if (f.endsWith(fileToLookFor) && f.contains(jarName)) { - // Cool, found it; - return; + String[] files = tmpFolder.list(); + if (files != null) { + for (String f: files) { + if (f.endsWith(fileToLookFor) && f.contains(jarName)) { + // Cool, found it; + return; + } } } fail("Could not find the expected lib jar file"); diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java index 5f575e6c07..3767e871ff 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.util; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; +import java.nio.charset.StandardCharsets; import java.util.HashSet; import java.util.Random; import java.util.Set; @@ -40,8 +41,8 @@ public class TestLoadTestKVGenerator { @Test public void testValueLength() { for (int i = 0; i < 1000; ++i) { - byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(), - String.valueOf(rand.nextInt()).getBytes()); + byte[] v = gen.generateRandomSizeValue(Integer.toString(i).getBytes(StandardCharsets.UTF_8), + String.valueOf(rand.nextInt()).getBytes(StandardCharsets.UTF_8)); assertTrue(MIN_LEN <= v.length); assertTrue(v.length <= MAX_LEN); } @@ -51,8 +52,8 @@ public class TestLoadTestKVGenerator { public void testVerification() { for (int i = 0; i < 1000; ++i) { for (int qualIndex = 0; qualIndex < 20; ++qualIndex) { - byte[] qual = String.valueOf(qualIndex).getBytes(); - byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes(); + byte[] qual = String.valueOf(qualIndex).getBytes(StandardCharsets.UTF_8); + byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes(StandardCharsets.UTF_8); byte[] v = gen.generateRandomSizeValue(rowKey, qual); assertTrue(LoadTestKVGenerator.verify(v, rowKey, qual)); v[0]++; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java index 8f213b77cd..c5d661f7a2 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestOrderedBytes.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; @@ -871,7 +872,9 @@ public class TestOrderedBytes { @Test public void testBlobVar() { byte[][] vals = - { "".getBytes(), "foo".getBytes(), "foobarbazbub".getBytes(), + { "".getBytes(StandardCharsets.UTF_8), + "foo".getBytes(StandardCharsets.UTF_8), + "foobarbazbub".getBytes(StandardCharsets.UTF_8), { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, /* 7 bytes of alternating bits; testing around HBASE-9893 */ }, { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, @@ -886,8 +889,14 @@ public class TestOrderedBytes { { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, /* 14 bytes of alternating bits; testing around HBASE-9893 */ }, - 
"1".getBytes(), "22".getBytes(), "333".getBytes(), "4444".getBytes(), - "55555".getBytes(), "666666".getBytes(), "7777777".getBytes(), "88888888".getBytes() + "1".getBytes(StandardCharsets.UTF_8), + "22".getBytes(StandardCharsets.UTF_8), + "333".getBytes(StandardCharsets.UTF_8), + "4444".getBytes(StandardCharsets.UTF_8), + "55555".getBytes(StandardCharsets.UTF_8), + "666666".getBytes(StandardCharsets.UTF_8), + "7777777".getBytes(StandardCharsets.UTF_8), + "88888888".getBytes(StandardCharsets.UTF_8) }; /* @@ -958,7 +967,9 @@ public class TestOrderedBytes { @Test public void testBlobCopy() { byte[][] vals = - { "".getBytes(), "foo".getBytes(), "foobarbazbub".getBytes(), + { "".getBytes(StandardCharsets.UTF_8), + "foo".getBytes(StandardCharsets.UTF_8), + "foobarbazbub".getBytes(StandardCharsets.UTF_8), { (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa, (byte) 0xaa }, { (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, (byte) 0x55, @@ -1033,9 +1044,9 @@ public class TestOrderedBytes { byte[] a = new byte[3 + (Order.ASCENDING == ord ? 1 : 2) + 2]; PositionedByteRange buf = new SimplePositionedMutableByteRange(a, 1, 3 + (Order.ASCENDING == ord ? 1 : 2)); - OrderedBytes.encodeBlobCopy(buf, "foobarbaz".getBytes(), 3, 3, ord); + OrderedBytes.encodeBlobCopy(buf, "foobarbaz".getBytes(StandardCharsets.UTF_8), 3, 3, ord); buf.setPosition(0); - assertArrayEquals("bar".getBytes(), OrderedBytes.decodeBlobCopy(buf)); + assertArrayEquals("bar".getBytes(StandardCharsets.UTF_8), OrderedBytes.decodeBlobCopy(buf)); } } @@ -1239,7 +1250,7 @@ public class TestOrderedBytes { buff.setPosition(0); assertEquals(OrderedBytes.length(buff), cnt); for (int i = 0; i < cnt; i++) { - assertEquals(OrderedBytes.isEncodedValue(buff), true); + assertEquals(true, OrderedBytes.isEncodedValue(buff)); OrderedBytes.skip(buff); } } diff --git a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/HistogramImpl.java b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/HistogramImpl.java index b52caf8f67..17a179dafa 100644 --- a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/HistogramImpl.java +++ b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/HistogramImpl.java @@ -67,6 +67,7 @@ public class HistogramImpl implements Histogram { histogram.add(value, 1); } + @Override public long getCount() { return counter.getCount(); } @@ -75,6 +76,7 @@ public class HistogramImpl implements Histogram { return this.histogram.getMax(); } + @Override public Snapshot snapshot() { return histogram.snapshotAndReset(); } diff --git a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/MetricRegistriesImpl.java b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/MetricRegistriesImpl.java index 3788bd1d57..29664295c6 100644 --- a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/MetricRegistriesImpl.java +++ b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/MetricRegistriesImpl.java @@ -60,22 +60,27 @@ public class MetricRegistriesImpl extends MetricRegistries { }); } + @Override public boolean remove(MetricRegistryInfo key) { return registries.remove(key) == null; } + @Override public Optional get(MetricRegistryInfo info) { return Optional.fromNullable(registries.get(info)); } + @Override public Collection getMetricRegistries() { return Collections.unmodifiableCollection(registries.values()); } + @Override public void clear() { 
registries.clear(); } + @Override public Set getMetricRegistryInfos() { return Collections.unmodifiableSet(registries.keySet()); } diff --git a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/RefCountingMap.java b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/RefCountingMap.java index 889b026e7f..25c763461a 100644 --- a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/RefCountingMap.java +++ b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/RefCountingMap.java @@ -24,7 +24,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.classification.InterfaceAudience; @@ -42,22 +42,24 @@ class RefCountingMap { private ConcurrentHashMap> map = new ConcurrentHashMap<>(); private static class Payload { V v; - volatile int refCount; + final AtomicInteger refCount = new AtomicInteger(1); // create with ref count = 1 Payload(V v) { this.v = v; - this.refCount = 1; // create with ref count = 1 } } + @edu.umd.cs.findbugs.annotations.SuppressWarnings( + value="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION", + justification="We use the object monitor to serialize operations on the concurrent map") V put(K key, Supplier supplier) { - synchronized (map) { + synchronized (this) { Payload oldValue = map.get(key); if (oldValue == null) { oldValue = new Payload(supplier.get()); map.put(key, oldValue); return oldValue.v; } - oldValue.refCount++; + oldValue.refCount.incrementAndGet(); return oldValue.v; } } @@ -73,10 +75,10 @@ class RefCountingMap { * @return the value associated with the specified key or null if key is removed from map. 
*/ V remove(K key) { - synchronized (map) { + synchronized (this) { Payload oldValue = map.get(key); if (oldValue != null) { - if (--oldValue.refCount == 0) { + if (oldValue.refCount.decrementAndGet() == 0) { map.remove(key); return null; } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java index e61d45de59..20ffa5cfd3 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/row/RowSectionWriter.java @@ -186,7 +186,7 @@ public class RowSectionWriter { ArrayList outs = Lists.newArrayList(); for (int i = ins.size() - 1; i >= 0; --i) { TokenizerNode n = ins.get(i); - if (n.isLeaf() && leaves || (!n.isLeaf() && !leaves)) { + if ((n.isLeaf() && leaves) || (!n.isLeaf() && !leaves)) { outs.add(ins.get(i)); } } diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java index 25bee1f8bd..64653f9ae7 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/tokenize/TokenizerNode.java @@ -312,7 +312,7 @@ public class TokenizerNode{ public void appendNodesToExternalList(List appendTo, boolean includeNonLeaves, boolean includeLeaves) { - if (includeNonLeaves && !isLeaf() || includeLeaves && isLeaf()) { + if ((includeNonLeaves && !isLeaf()) || (includeLeaves && isLeaf())) { appendTo.add(this); } for (int i = 0; i < children.size(); ++i) { diff --git a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java index a3da9f0129..a4eb64b593 100644 --- a/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java +++ b/hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/util/vint/UFIntTool.java @@ -72,7 +72,7 @@ public class UFIntTool { public static void writeBytes(int outputWidth, final long value, byte[] bytes, int offset) { bytes[offset + outputWidth - 1] = (byte) value; for (int i = outputWidth - 2; i >= 0; --i) { - bytes[offset + i] = (byte) (value >>> (outputWidth - i - 1) * 8); + bytes[offset + i] = (byte) (value >>> ((outputWidth - i - 1) * 8)); } } diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java index fb195b65d9..55b26acef7 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPrettyPrinter.java @@ -149,6 +149,7 @@ public class ProcedureWALPrettyPrinter extends Configured implements Tool { * @throws IOException * Thrown upon file system errors etc. 
*/ + @Override public int run(final String[] args) throws IOException { // create options Options options = new Options(); diff --git a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/ByteSlot.java b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/ByteSlot.java index c4ed9b76cb..1a48f49cf4 100644 --- a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/ByteSlot.java +++ b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/util/ByteSlot.java @@ -81,11 +81,13 @@ public class ByteSlot extends OutputStream { buf[offset] = (byte)b; } + @Override public void write(int b) { ensureCapacity(size + 1); buf[size++] = (byte)b; } + @Override public void write(byte[] b, int off, int len) { ensureCapacity(size + len); System.arraycopy(b, off, buf, size, len); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java index 61f2333fa6..0937f70b51 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestChildProcedures.java @@ -146,6 +146,7 @@ public class TestChildProcedures { public static class TestRootProcedure extends SequentialProcedure { public TestRootProcedure() {} + @Override public Procedure[] execute(TestProcEnv env) { if (env.toggleKillBeforeStoreUpdate) { ProcedureTestingUtility.toggleKillBeforeStoreUpdate(procExecutor); @@ -153,6 +154,7 @@ public class TestChildProcedures { return new Procedure[] { new TestChildProcedure(), new TestChildProcedure() }; } + @Override public void rollback(TestProcEnv env) { } @@ -165,6 +167,7 @@ public class TestChildProcedures { public static class TestChildProcedure extends SequentialProcedure { public TestChildProcedure() {} + @Override public Procedure[] execute(TestProcEnv env) { if (env.toggleKillBeforeStoreUpdate) { ProcedureTestingUtility.toggleKillBeforeStoreUpdate(procExecutor); @@ -175,6 +178,7 @@ public class TestChildProcedures { return null; } + @Override public void rollback(TestProcEnv env) { } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java index 8bc8fa8beb..5e8f201ed2 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureInMemoryChore.java @@ -33,7 +33,6 @@ import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -49,6 +48,7 @@ public class TestProcedureInMemoryChore { private HBaseCommonTestingUtility htu; + @SuppressWarnings("rawtypes") @Before public void setUp() throws IOException { htu = new HBaseCommonTestingUtility(); diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java index c51f3cf7be..2557c982b4 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java +++ 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestProcedureReplayOrder.java @@ -133,8 +133,9 @@ public class TestProcedureReplayOrder { public void run() { for (int i = 0; i < nprocPerThread; ++i) { try { - procExecutor.submitProcedure((Procedure)procClazz.newInstance()); - } catch (InstantiationException|IllegalAccessException e) { + procExecutor.submitProcedure((Procedure) + procClazz.getDeclaredConstructor().newInstance()); + } catch (Exception e) { LOG.error("unable to instantiate the procedure", e); fail("failure during the proc.newInstance(): " + e.getMessage()); } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java index 211d06d116..ba362917ed 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/TestYieldProcedures.java @@ -193,7 +193,7 @@ public class TestYieldProcedures { extends StateMachineProcedure { enum State { STATE_1, STATE_2, STATE_3 } - public class ExecutionInfo { + public static class ExecutionInfo { private final boolean rollback; private final long timestamp; private final State step; @@ -358,7 +358,8 @@ public class TestYieldProcedures { public TestRunQueue() {} - public void addFront(final Procedure proc) { + @Override + public void addFront(final Procedure proc) { addFrontCalls++; super.addFront(proc); } diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALLoaderPerformanceEvaluation.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALLoaderPerformanceEvaluation.java index 401b859719..d34d2594db 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALLoaderPerformanceEvaluation.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALLoaderPerformanceEvaluation.java @@ -72,7 +72,7 @@ public class ProcedureWALLoaderPerformanceEvaluation extends AbstractHBaseTool { private WALProcedureStore store; static byte[] serializedState; - private class LoadCounter implements ProcedureStore.ProcedureLoader { + private static class LoadCounter implements ProcedureStore.ProcedureLoader { public LoadCounter() {} @Override @@ -85,7 +85,7 @@ public class ProcedureWALLoaderPerformanceEvaluation extends AbstractHBaseTool { if (procIter.isNextCompleted()) { ProcedureInfo proc = procIter.nextAsProcedureInfo(); } else { - Procedure proc = procIter.nextAsProcedure(); + Procedure proc = procIter.nextAsProcedure(); } } } @@ -93,7 +93,7 @@ public class ProcedureWALLoaderPerformanceEvaluation extends AbstractHBaseTool { @Override public void handleCorrupted(ProcedureIterator procIter) throws IOException { while (procIter.hasNext()) { - Procedure proc = procIter.nextAsProcedure(); + Procedure proc = procIter.nextAsProcedure(); } } } @@ -171,8 +171,7 @@ public class ProcedureWALLoaderPerformanceEvaluation extends AbstractHBaseTool { private void writeWals() throws IOException { List procStates = shuffleProcWriteSequence(); TestProcedure[] procs = new TestProcedure[numProcs + 1]; // 0 is not used. - int numProcsPerWal = numWals > 0 ? (int)Math.ceil(procStates.size() / numWals) - : Integer.MAX_VALUE; + int numProcsPerWal = numWals > 0 ? 
procStates.size() / numWals : Integer.MAX_VALUE; long startTime = currentTimeMillis(); long lastTime = startTime; for (int i = 0; i < procStates.size(); ++i) { diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPerformanceEvaluation.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPerformanceEvaluation.java index f657822b00..7565c41a38 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPerformanceEvaluation.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/ProcedureWALPerformanceEvaluation.java @@ -237,7 +237,7 @@ public class ProcedureWALPerformanceEvaluation extends AbstractHBaseTool { } } - public class NoSyncWalProcedureStore extends WALProcedureStore { + public static class NoSyncWalProcedureStore extends WALProcedureStore { public NoSyncWalProcedureStore(final Configuration conf, final FileSystem fs, final Path logDir) { super(conf, fs, logDir, new WALProcedureStore.LeaseRecovery() { diff --git a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java index 3c52ea1df0..0e034a0988 100644 --- a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java +++ b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/util/TestTimeoutBlockingQueue.java @@ -53,6 +53,7 @@ public class TestTimeoutBlockingQueue { return timeout; } + @Override public String toString() { return String.format("(%03d, %03d)", seqId, timeout); } diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java index cc51c85965..0a88fdebb5 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java @@ -377,6 +377,7 @@ public class RowSpec { this.endTime = endTime; } + @Override public String toString() { StringBuilder result = new StringBuilder(); result.append("{startRow => '"); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java index 7c3e1fd9bb..e3197041b9 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java @@ -18,6 +18,8 @@ */ package org.apache.hadoop.hbase.rest; +import java.util.Arrays; + import org.apache.commons.lang.ArrayUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -78,7 +80,7 @@ public class HBaseRESTTestingUtility { conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*"); RESTServer.addCSRFFilter(context, conf); HttpServerUtil.constrainHttpMethods(context); - LOG.info("Loaded filter classes :" + filterClasses); + LOG.info("Loaded filter classes :" + Arrays.toString(filterClasses)); // start the server server.start(); // get the port diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java index ad1ca29556..9a3ec2481d 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java +++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java @@ -487,10 +487,11 @@ public class PerformanceEvaluation extends Configured implements Tool { return clazz; } + @Override protected void map(NullWritable key, PeInputSplit value, final Context context) throws IOException, InterruptedException { - Status status = new Status() { + @Override public void setStatus(String msg) { context.setStatus(msg); } @@ -632,6 +633,7 @@ public class PerformanceEvaluation extends Configured implements Tool { long elapsedTime = pe.runOneClient(cmd, index * perClientRows, perClientRows, R, flushCommits, writeToWAL, useTags, noOfTags, connection, new Status() { + @Override public void setStatus(final String msg) throws IOException { LOG.info("client-" + getName() + " " + msg); } @@ -952,6 +954,7 @@ public class PerformanceEvaluation extends Configured implements Tool { super(conf, options, status); } + @Override void testSetup() throws IOException { this.table = connection.getTable(tableName); } @@ -971,10 +974,12 @@ public class PerformanceEvaluation extends Configured implements Tool { this.flushCommits = options.isFlushCommits(); } + @Override void testSetup() throws IOException { this.mutator = connection.getBufferedMutator(tableName); } + @Override void testTakedown() throws IOException { if (flushCommits) { this.mutator.flush(); @@ -1323,6 +1328,7 @@ public class PerformanceEvaluation extends Configured implements Tool { private void runNIsOne(final Class cmd) { Status status = new Status() { + @Override public void setStatus(String msg) throws IOException { LOG.info(msg); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java index 00abe3af94..9da9761717 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/RowResourceBase.java @@ -151,7 +151,7 @@ public class RowResourceBase { protected static void checkValueXML(String url, String table, String row, String column, String value) throws IOException, JAXBException { Response response = getValueXML(url); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); @@ -164,7 +164,7 @@ public class RowResourceBase { protected static void checkValueXML(String table, String row, String column, String value) throws IOException, JAXBException { Response response = getValueXML(table, row, column); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); @@ -176,11 +176,11 @@ public class RowResourceBase { protected static void checkIncrementValueXML(String table, String row, String column, long value) throws IOException, JAXBException { - Response response1 = getValueXML(table, row, column); - assertEquals(response1.getCode(), 200); - assertEquals(Constants.MIMETYPE_XML, response1.getHeader("content-type")); + Response response = getValueXML(table, row, column); + assertEquals(200, response.getCode()); + assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) - 
xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response1.getBody())); + xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); RowModel rowModel = cellSet.getRows().get(0); CellModel cell = rowModel.getCells().get(0); assertEquals(Bytes.toString(cell.getColumn()), column); @@ -234,7 +234,7 @@ public class RowResourceBase { protected static void checkValuePB(String table, String row, String column, String value) throws IOException { Response response = getValuePB(table, row, column); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); CellSetModel cellSet = new CellSetModel(); cellSet.getObjectFromMessage(response.getBody()); @@ -247,7 +247,7 @@ public class RowResourceBase { protected static void checkIncrementValuePB(String table, String row, String column, long value) throws IOException { Response response = getValuePB(table, row, column); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); CellSetModel cellSet = new CellSetModel(); cellSet.getObjectFromMessage(response.getBody()); @@ -533,7 +533,7 @@ public class RowResourceBase { protected static void checkValueJSON(String table, String row, String column, String value) throws IOException, JAXBException { Response response = getValueJson(table, row, column); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); ObjectMapper mapper = new JacksonProvider() .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); @@ -547,7 +547,7 @@ public class RowResourceBase { protected static void checkIncrementValueJSON(String table, String row, String column, long value) throws IOException, JAXBException { Response response = getValueJson(table, row, column); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); ObjectMapper mapper = new JacksonJaxbJsonProvider() .locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java index 5af3831ccf..6e3827dd93 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestDeleteRow.java @@ -35,7 +35,7 @@ public class TestDeleteRow extends RowResourceBase { @Test public void testDeleteNonExistentColumn() throws Exception { Response response = putValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = checkAndDeleteJson(TABLE, ROW_1, COLUMN_1, VALUE_2); assertEquals(304, response.getCode()); @@ -61,39 +61,39 @@ public class TestDeleteRow extends RowResourceBase { @Test public void testDeleteXML() throws IOException, JAXBException { Response response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = putValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2); response = 
deleteValue(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = getValueXML(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2); response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = checkAndDeletePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = getValueXML(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = getValueXML(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); response = getValueXML(TABLE, ROW_1, COLUMN_2); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //Delete a row in non existent table response = deleteValue("dummy", ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //Delete non existent column response = deleteValue(TABLE, ROW_1, "dummy"); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); } } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java index 25b445e762..ba0f9288dc 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java @@ -25,10 +25,8 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.StringWriter; import java.net.URLEncoder; -import java.util.Dictionary; import java.util.HashMap; import java.util.List; -import java.util.Map; import javax.xml.bind.JAXBException; @@ -57,92 +55,92 @@ public class TestGetAndPutResource extends RowResourceBase { conf.set("hbase.rest.readonly", "true"); Response response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_2); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_2); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); response = deleteValue(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); response = checkAndDeletePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); conf.set("hbase.rest.readonly", "false"); response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = checkAndPutValueXML(TABLE, 
ROW_1, COLUMN_1, VALUE_1, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2, VALUE_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteValue(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testSingleCellGetPutXML() throws IOException, JAXBException { Response response = getValueXML(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2); response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2, VALUE_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_3); response = checkAndDeleteXML(TABLE, ROW_1, COLUMN_1, VALUE_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testSingleCellGetPutPB() throws IOException, JAXBException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2); response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2, VALUE_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_3); response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_3, VALUE_4); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_4); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testMultipleCellCheckPutPB() throws IOException, JAXBException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // Add 2 Columns to setup the test response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); response = putValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); HashMap otherCells = new HashMap(); @@ -150,32 +148,32 @@ public class TestGetAndPutResource extends RowResourceBase { // On Success update both the cells response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_3, 
otherCells); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_3); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_3); // On Failure, we dont update any cells response = checkAndPutValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_4, otherCells); - assertEquals(response.getCode(), 304); + assertEquals(304, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_3); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_3); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testMultipleCellCheckPutXML() throws IOException, JAXBException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // Add 2 Columns to setup the test response = putValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); response = putValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_2); HashMap otherCells = new HashMap(); @@ -183,36 +181,36 @@ public class TestGetAndPutResource extends RowResourceBase { // On Success update both the cells response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_3, otherCells); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_3); checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_3); // On Failure, we dont update any cells response = checkAndPutValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1, VALUE_4, otherCells); - assertEquals(response.getCode(), 304); + assertEquals(304, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_3); checkValueXML(TABLE, ROW_1, COLUMN_2, VALUE_3); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testMultipleCellCheckDeletePB() throws IOException, JAXBException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // Add 3 Columns to setup the test response = putValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); response = putValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); response = putValuePB(TABLE, ROW_1, COLUMN_3, VALUE_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_3, VALUE_3); // Deletes the following columns based on Column1 check @@ -222,44 +220,45 @@ public class TestGetAndPutResource extends RowResourceBase { // On Success update both the cells response = checkAndDeletePB(TABLE, ROW_1, COLUMN_1, VALUE_1, cellsToDelete); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); response = getValuePB(TABLE, ROW_1, COLUMN_2); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); response = getValuePB(TABLE, ROW_1, COLUMN_3); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); response = 
putValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); response = putValuePB(TABLE, ROW_1, COLUMN_3, VALUE_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_3, VALUE_3); // On Failure, we dont update any cells response = checkAndDeletePB(TABLE, ROW_1, COLUMN_1, VALUE_3, cellsToDelete); - assertEquals(response.getCode(), 304); + assertEquals(304, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); checkValuePB(TABLE, ROW_1, COLUMN_2, VALUE_2); checkValuePB(TABLE, ROW_1, COLUMN_3, VALUE_3); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } - @Test + + @Test public void testSingleCellGetPutBinary() throws IOException { final String path = "/" + TABLE + "/" + ROW_3 + "/" + COLUMN_1; final byte[] body = Bytes.toBytes(VALUE_3); Response response = client.put(path, Constants.MIMETYPE_BINARY, body); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); Thread.yield(); response = client.get(path, Constants.MIMETYPE_BINARY); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_BINARY, response.getHeader("content-type")); assertTrue(Bytes.equals(response.getBody(), body)); boolean foundTimestampHeader = false; @@ -272,7 +271,7 @@ public class TestGetAndPutResource extends RowResourceBase { assertTrue(foundTimestampHeader); response = deleteRow(TABLE, ROW_3); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -280,13 +279,13 @@ public class TestGetAndPutResource extends RowResourceBase { final String path = "/" + TABLE + "/" + ROW_4 + "/" + COLUMN_1; Response response = client.put(path, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_4)); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); Thread.yield(); response = client.get(path, Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); response = deleteRow(TABLE, ROW_4); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -304,10 +303,10 @@ public class TestGetAndPutResource extends RowResourceBase { String jsonString = jsonMapper.writeValueAsString(cellSetModel); Response response = client.put(path, Constants.MIMETYPE_JSON, Bytes.toBytes(jsonString)); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); Thread.yield(); response = client.get(path, Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); CellSetModel cellSet = jsonMapper.readValue(response.getBody(), CellSetModel.class); assertTrue(cellSet.getRows().size() == 1); @@ -316,7 +315,7 @@ public class TestGetAndPutResource extends RowResourceBase { assertEquals(VALUE_2 , Bytes.toString(cell.getValue())); assertEquals(2L , cell.getTimestamp()); response = deleteRow(TABLE, ROW_4); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -332,7 +331,7 @@ public class TestGetAndPutResource extends RowResourceBase { Response response; response = putValueXML(path.toString(), TABLE, urlKey, COLUMN_1, VALUE_1); - 
assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(path.toString(), TABLE, urlKey, COLUMN_1, VALUE_1); } @@ -342,13 +341,10 @@ public class TestGetAndPutResource extends RowResourceBase { final String badPath = "/" + TABLE + "/" + ROW_1 + "/" + "BAD"; Response response = client.post(goodPath, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_1)); - assertEquals(response.getCode(), 200); - assertEquals(client.get(goodPath, Constants.MIMETYPE_BINARY).getCode(), - 200); - assertEquals(client.get(badPath, Constants.MIMETYPE_BINARY).getCode(), - 404); - assertEquals(client.get(goodPath, Constants.MIMETYPE_BINARY).getCode(), - 200); + assertEquals(200, response.getCode()); + assertEquals(200, client.get(goodPath, Constants.MIMETYPE_BINARY).getCode()); + assertEquals(404, client.get(badPath, Constants.MIMETYPE_BINARY).getCode()); + assertEquals(200, client.get(goodPath, Constants.MIMETYPE_BINARY).getCode()); } @Test @@ -376,7 +372,7 @@ public class TestGetAndPutResource extends RowResourceBase { // make sure the fake row was not actually created response = client.get(path, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // check that all of the values were created checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); @@ -385,9 +381,9 @@ public class TestGetAndPutResource extends RowResourceBase { checkValueXML(TABLE, ROW_2, COLUMN_2, VALUE_4); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteRow(TABLE, ROW_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -413,7 +409,7 @@ public class TestGetAndPutResource extends RowResourceBase { // make sure the fake row was not actually created response = client.get(path, Constants.MIMETYPE_PROTOBUF); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // check that all of the values were created checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); @@ -422,9 +418,9 @@ public class TestGetAndPutResource extends RowResourceBase { checkValuePB(TABLE, ROW_2, COLUMN_2, VALUE_4); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteRow(TABLE, ROW_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -469,7 +465,7 @@ public class TestGetAndPutResource extends RowResourceBase { Response response = client.put(path, Constants.MIMETYPE_XML, Bytes.toBytes(writer.toString())); - assertEquals(response.getCode(), 400); + assertEquals(400, response.getCode()); } @Test @@ -487,7 +483,7 @@ public class TestGetAndPutResource extends RowResourceBase { Response response = client.put(path, Constants.MIMETYPE_XML, Bytes.toBytes(writer.toString())); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); } @Test @@ -515,7 +511,7 @@ public class TestGetAndPutResource extends RowResourceBase { // make sure the fake row was not actually created response = client.get(path, Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // check that all of the values were created checkValueJSON(TABLE, ROW_1, COLUMN_1, VALUE_1); @@ -524,9 +520,9 @@ public class TestGetAndPutResource extends RowResourceBase { checkValueJSON(TABLE, ROW_2, COLUMN_2, VALUE_4); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, 
response.getCode()); response = deleteRow(TABLE, ROW_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -534,13 +530,13 @@ public class TestGetAndPutResource extends RowResourceBase { final String path = "/" + TABLE + "/" + ROW_4 + "/" + COLUMN_1; Response response = client.put(path, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_4)); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); Thread.yield(); response = client.get(path, Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); response = deleteRow(TABLE, ROW_4); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); UserProvider userProvider = UserProvider.instantiate(conf); METRICS_ASSERT.assertCounterGt("requests", 2l, @@ -573,7 +569,7 @@ public class TestGetAndPutResource extends RowResourceBase { // make sure the fake row was not actually created response = client.get(path, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // Try getting all the column values at once. path = "/" + TABLE + "/" + ROW_1 + "/" + COLUMN_1 + "," + COLUMN_2 + "," + COLUMN_3; @@ -589,7 +585,7 @@ public class TestGetAndPutResource extends RowResourceBase { assertTrue(containsCellModel(cells, COLUMN_2, VALUE_2)); assertTrue(containsCellModel(cells, COLUMN_3, VALUE_2)); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } private boolean containsCellModel(List cells, String column, String value) { @@ -629,7 +625,7 @@ public class TestGetAndPutResource extends RowResourceBase { // make sure the fake row was not actually created response = client.get(path, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // check that all of the values were created StringBuilder query = new StringBuilder(); @@ -638,16 +634,16 @@ public class TestGetAndPutResource extends RowResourceBase { query.append('/'); query.append("testrow*"); response = client.get(query.toString(), Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); assertTrue(cellSet.getRows().size() == 2); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteRow(TABLE, ROW_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -675,7 +671,7 @@ public class TestGetAndPutResource extends RowResourceBase { // make sure the fake row was not actually created response = client.get(path, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); // check that all of the values were created StringBuilder query = new StringBuilder(); @@ -686,7 +682,7 @@ public class TestGetAndPutResource extends RowResourceBase { query.append('/'); query.append(COLUMN_1); response = client.get(query.toString(), Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) 
xmlUnmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); @@ -697,114 +693,113 @@ public class TestGetAndPutResource extends RowResourceBase { assertEquals(COLUMN_1, Bytes.toString(row.getCells().get(0).getColumn())); } response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = deleteRow(TABLE, ROW_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testAppendXML() throws IOException, JAXBException { Response response = getValueXML(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //append cell response = appendValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1); response = appendValueXML(TABLE, ROW_1, COLUMN_1, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValueXML(TABLE, ROW_1, COLUMN_1, VALUE_1 + VALUE_2); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testAppendPB() throws IOException, JAXBException { Response response = getValuePB(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //append cell response = appendValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1); response = appendValuePB(TABLE, ROW_1, COLUMN_1, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkValuePB(TABLE, ROW_1, COLUMN_1, VALUE_1 + VALUE_2); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testAppendJSON() throws IOException, JAXBException { Response response = getValueJson(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //append cell response = appendValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); putValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1); response = appendValueJson(TABLE, ROW_1, COLUMN_1, VALUE_2); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); putValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1 + VALUE_2); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testIncrementXML() throws IOException, JAXBException { Response response = getValueXML(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //append single cell response = incrementValueXML(TABLE, ROW_1, COLUMN_1, VALUE_5); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkIncrementValueXML(TABLE, ROW_1, COLUMN_1, Long.parseLong(VALUE_5)); response = incrementValueXML(TABLE, ROW_1, COLUMN_1, VALUE_6); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkIncrementValueXML(TABLE, ROW_1, COLUMN_1, Long.parseLong(VALUE_5) + Long.parseLong(VALUE_6)); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testIncrementPB() throws IOException, JAXBException { 
Response response = getValuePB(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //append cell response = incrementValuePB(TABLE, ROW_1, COLUMN_1, VALUE_5); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkIncrementValuePB(TABLE, ROW_1, COLUMN_1, Long.parseLong(VALUE_5)); response = incrementValuePB(TABLE, ROW_1, COLUMN_1, VALUE_6); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkIncrementValuePB(TABLE, ROW_1, COLUMN_1, Long.parseLong(VALUE_5) + Long.parseLong(VALUE_6)); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testIncrementJSON() throws IOException, JAXBException { Response response = getValueJson(TABLE, ROW_1, COLUMN_1); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); //append cell response = incrementValueJson(TABLE, ROW_1, COLUMN_1, VALUE_5); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkIncrementValueJSON(TABLE, ROW_1, COLUMN_1, Long.parseLong(VALUE_5)); response = incrementValueJson(TABLE, ROW_1, COLUMN_1, VALUE_6); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); checkIncrementValueJSON(TABLE, ROW_1, COLUMN_1, Long.parseLong(VALUE_5) + Long.parseLong(VALUE_6)); response = deleteRow(TABLE, ROW_1); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java index c984b698bb..5a9344ab15 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java @@ -25,6 +25,7 @@ import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; @@ -98,7 +99,7 @@ public class TestGzipFilter { headers[0] = new Header("Content-Type", Constants.MIMETYPE_BINARY); headers[1] = new Header("Content-Encoding", "gzip"); Response response = client.put(path, headers, value_1_gzip); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); Table table = TEST_UTIL.getConnection().getTable(TABLE); Get get = new Get(Bytes.toBytes(ROW_1)); @@ -113,7 +114,7 @@ public class TestGzipFilter { headers[0] = new Header("Accept", Constants.MIMETYPE_BINARY); headers[1] = new Header("Accept-Encoding", "gzip"); response = client.get(path, headers); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); ByteArrayInputStream bis = new ByteArrayInputStream(response.getBody()); GZIPInputStream is = new GZIPInputStream(bis); value = new byte[VALUE_1.length]; @@ -131,11 +132,11 @@ public class TestGzipFilter { headers[0] = new Header("Accept", Constants.MIMETYPE_BINARY); headers[1] = new Header("Accept-Encoding", "gzip"); Response response = client.get("/" + TABLE + "/" + ROW_1 + "/" + COLUMN_2, headers); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); String contentEncoding = response.getHeader("Content-Encoding"); assertTrue(contentEncoding == null || !contentEncoding.contains("gzip")); response = client.get("/" + TABLE, headers); - 
assertEquals(response.getCode(), 405); + assertEquals(405, response.getCode()); contentEncoding = response.getHeader("Content-Encoding"); assertTrue(contentEncoding == null || !contentEncoding.contains("gzip")); } @@ -146,14 +147,14 @@ public class TestGzipFilter { headers[1] = new Header("Accept", Constants.MIMETYPE_JSON); headers[2] = new Header("Accept-Encoding", "gzip"); Response response = client.post("/" + TABLE + "/scanner", headers, - "".getBytes()); - assertEquals(response.getCode(), 201); + "".getBytes(StandardCharsets.UTF_8)); + assertEquals(201, response.getCode()); String scannerUrl = response.getLocation(); assertNotNull(scannerUrl); response = client.get(scannerUrl); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); response = client.get(scannerUrl); - assertEquals(response.getCode(), 204); + assertEquals(204, response.getCode()); } } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java index 35fb7fd649..e58be134bf 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java @@ -43,8 +43,6 @@ import org.junit.runners.Parameterized; import javax.ws.rs.core.MediaType; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import javax.xml.bind.Marshaller; -import javax.xml.bind.Unmarshaller; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -72,9 +70,8 @@ public class TestMultiRowResource { private static final HBaseRESTTestingUtility REST_TEST_UTIL = new HBaseRESTTestingUtility(); private static Client client; + @SuppressWarnings("unused") private static JAXBContext context; - private static Marshaller marshaller; - private static Unmarshaller unmarshaller; private static Configuration conf; private static Header extraHdr = null; @@ -104,8 +101,6 @@ public class TestMultiRowResource { CellModel.class, CellSetModel.class, RowModel.class); - marshaller = context.createMarshaller(); - unmarshaller = context.createUnmarshaller(); client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort())); Admin admin = TEST_UTIL.getHBaseAdmin(); if (admin.tableExists(TABLE)) { @@ -148,7 +143,7 @@ public class TestMultiRowResource { Response response = client.get(path.toString(), Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); client.delete(row_5_url, extraHdr); @@ -175,7 +170,7 @@ public class TestMultiRowResource { Response response = client.get(path.toString(), Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); client.delete(row_5_url, extraHdr); @@ -202,7 +197,7 @@ public class TestMultiRowResource { client.post(row_6_url, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_2), extraHdr); Response response = client.get(path.toString(), Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); ObjectMapper mapper = new JacksonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); CellSetModel cellSet = (CellSetModel) mapper.readValue(response.getBody(), CellSetModel.class); @@ -231,7 +226,7 @@ public class 
TestMultiRowResource { client.post(row_5_url, Constants.MIMETYPE_BINARY, Bytes.toBytes(VALUE_1), extraHdr); Response response = client.get(path.toString(), Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); ObjectMapper mapper = new JacksonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE); CellSetModel cellSet = (CellSetModel) mapper.readValue(response.getBody(), CellSetModel.class); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java index 242c36d8ee..d941a9faba 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java @@ -38,8 +38,6 @@ import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import org.apache.commons.httpclient.Header; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -68,7 +66,7 @@ import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestScannerResource { - private static final Log LOG = LogFactory.getLog(TestScannerResource.class); + private static final TableName TABLE = TableName.valueOf("TestScannerResource"); private static final TableName TABLE_TO_BE_DISABLED = TableName.valueOf("ScannerResourceDisable"); private static final String NONEXISTENT_TABLE = "ThisTableDoesNotExist"; @@ -134,7 +132,7 @@ public class TestScannerResource { model.setBatch(100); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); int count = 0; @@ -160,7 +158,7 @@ public class TestScannerResource { } // delete the scanner response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); return count; } @@ -216,7 +214,7 @@ public class TestScannerResource { conf.set("hbase.rest.readonly", "true"); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); String scannerURI = response.getLocation(); assertNull(scannerURI); @@ -224,28 +222,28 @@ public class TestScannerResource { conf.set("hbase.rest.readonly", "false"); response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); // confirm batch size conformance - assertEquals(countCellSet(cellSet), BATCH_SIZE); + assertEquals(BATCH_SIZE, countCellSet(cellSet)); // test delete scanner operation is forbidden in read-only mode conf.set("hbase.rest.readonly", "true"); response = client.delete(scannerURI); - 
assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // recall previous delete scanner operation with read-only off conf.set("hbase.rest.readonly", "false"); response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -260,7 +258,7 @@ public class TestScannerResource { conf.set("hbase.rest.readonly", "true"); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); String scannerURI = response.getLocation(); assertNull(scannerURI); @@ -268,28 +266,28 @@ public class TestScannerResource { conf.set("hbase.rest.readonly", "false"); response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_PROTOBUF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); CellSetModel cellSet = new CellSetModel(); cellSet.getObjectFromMessage(response.getBody()); // confirm batch size conformance - assertEquals(countCellSet(cellSet), BATCH_SIZE); + assertEquals(BATCH_SIZE, countCellSet(cellSet)); // test delete scanner operation is forbidden in read-only mode conf.set("hbase.rest.readonly", "true"); response = client.delete(scannerURI); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // recall previous delete scanner operation with read-only off conf.set("hbase.rest.readonly", "false"); response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test @@ -303,7 +301,7 @@ public class TestScannerResource { conf.set("hbase.rest.readonly", "true"); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); String scannerURI = response.getLocation(); assertNull(scannerURI); @@ -311,13 +309,13 @@ public class TestScannerResource { conf.set("hbase.rest.readonly", "false"); response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell response = client.get(scannerURI, Constants.MIMETYPE_BINARY); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_BINARY, response.getHeader("content-type")); // verify that data was returned assertTrue(response.getBody().length > 0); @@ -340,23 +338,23 @@ public class TestScannerResource { // test delete scanner operation is forbidden in read-only mode conf.set("hbase.rest.readonly", "true"); response = client.delete(scannerURI); - assertEquals(response.getCode(), 403); + assertEquals(403, response.getCode()); // recall previous delete scanner operation with read-only off conf.set("hbase.rest.readonly", "false"); response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } @Test public void testFullTableScan() throws IOException { 
ScannerModel model = new ScannerModel(); model.addColumn(Bytes.toBytes(COLUMN_1)); - assertEquals(fullTableScan(model), expectedRows1); + assertEquals(expectedRows1, fullTableScan(model)); model = new ScannerModel(); model.addColumn(Bytes.toBytes(COLUMN_2)); - assertEquals(fullTableScan(model), expectedRows2); + assertEquals(expectedRows2, fullTableScan(model)); } @Test @@ -370,7 +368,7 @@ public class TestScannerResource { String scannerURI = response.getLocation(); assertNotNull(scannerURI); response = client.get(scannerURI, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 404); + assertEquals(404, response.getCode()); } // performs table scan during which the underlying table is disabled @@ -382,7 +380,7 @@ public class TestScannerResource { model.setCaching(1); Response response = client.put("/" + TABLE_TO_BE_DISABLED + "/scanner", Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput()); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); TEST_UTIL.getHBaseAdmin().disableTable(TABLE_TO_BE_DISABLED); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java index a06d650736..07d2f03013 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java @@ -113,8 +113,8 @@ public class TestScannersWithFilters { private static JAXBContext context; private static Marshaller marshaller; private static Unmarshaller unmarshaller; - private static long numRows = ROWS_ONE.length + ROWS_TWO.length; - private static long colsPerRow = FAMILIES.length * QUALIFIERS_ONE.length; + private static long numRows = (long) ROWS_ONE.length + ROWS_TWO.length; + private static long colsPerRow = (long) FAMILIES.length * QUALIFIERS_ONE.length; @BeforeClass public static void setUpBeforeClass() throws Exception { @@ -221,13 +221,13 @@ public class TestScannersWithFilters { byte[] body = Bytes.toBytes(writer.toString()); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cells = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); @@ -243,7 +243,7 @@ public class TestScannersWithFilters { // delete the scanner response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); } private static void verifyScanFull(Scan s, KeyValue [] kvs) @@ -256,20 +256,20 @@ public class TestScannersWithFilters { byte[] body = Bytes.toBytes(writer.toString()); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); 
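The assertEquals swaps running through these REST test hunks put the literal expected value first, matching JUnit's declared parameter order so that a failure message reports the two values in the right slots. A minimal sketch of the difference, using a hypothetical fakeStatusCode() stand-in rather than anything from the patch:

    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class AssertOrderExample {
      // Stand-in for Response.getCode(); purely illustrative.
      private int fakeStatusCode() {
        return 200;
      }

      @Test
      public void expectedValueComesFirst() {
        // JUnit's contract is assertEquals(expected, actual). With the arguments
        // reversed, a failing run prints the real value in the "expected" slot and
        // points the reader at the wrong number.
        assertEquals(200, fakeStatusCode());
      }
    }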
assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); // delete the scanner response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); int row = 0; int idx = 0; @@ -310,20 +310,20 @@ public class TestScannersWithFilters { byte[] body = Bytes.toBytes(writer.toString()); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response.getBody())); // delete the scanner response = client.delete(scannerURI); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); Iterator i = cellSet.getRows().iterator(); int j = 0; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java index 175b9f374d..c72517c949 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java @@ -26,7 +26,6 @@ import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.Random; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; @@ -166,6 +165,7 @@ public class TestScannersWithLabels { private static void createLabels() throws IOException, InterruptedException { PrivilegedExceptionAction action = new PrivilegedExceptionAction() { + @Override public VisibilityLabelsResponse run() throws Exception { String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET }; try (Connection conn = ConnectionFactory.createConnection(conf)) { @@ -200,14 +200,14 @@ public class TestScannersWithLabels { // recall previous put operation with read-only off conf.set("hbase.rest.readonly", "false"); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_XML); // Respond with 204 as there are no cells to be retrieved - assertEquals(response.getCode(), 204); + assertEquals(204, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); } @@ -225,18 +225,18 @@ public class TestScannersWithLabels { // recall previous put operation with read-only off conf.set("hbase.rest.readonly", "false"); Response response = client.put("/" + TABLE + "/scanner", Constants.MIMETYPE_XML, body); - assertEquals(response.getCode(), 201); + assertEquals(201, response.getCode()); String scannerURI = response.getLocation(); assertNotNull(scannerURI); // get a cell set response = client.get(scannerURI, Constants.MIMETYPE_XML); // Respond with 204 as there are no cells to be retrieved - assertEquals(response.getCode(), 200); + 
assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); CellSetModel cellSet = (CellSetModel) unmarshaller.unmarshal(new ByteArrayInputStream(response .getBody())); - assertEquals(countCellSet(cellSet), 5); + assertEquals(5, countCellSet(cellSet)); } } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java index bba185da70..2ee3397b5c 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java @@ -25,12 +25,9 @@ import java.io.IOException; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.rest.client.Client; import org.apache.hadoop.hbase.rest.client.Cluster; import org.apache.hadoop.hbase.rest.client.Response; @@ -46,7 +43,6 @@ import org.junit.experimental.categories.Category; @Category(MediumTests.class) public class TestStatusResource { - private static final Log LOG = LogFactory.getLog(TestStatusResource.class); private static final byte[] META_REGION_NAME = Bytes.toBytes(TableName.META_TABLE_NAME+",,1"); @@ -96,7 +92,7 @@ public class TestStatusResource { @Test public void testGetClusterStatusXML() throws IOException, JAXBException { Response response = client.get("/status/cluster", Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); StorageClusterStatusModel model = (StorageClusterStatusModel) context.createUnmarshaller().unmarshal( @@ -107,13 +103,13 @@ public class TestStatusResource { @Test public void testGetClusterStatusPB() throws IOException { Response response = client.get("/status/cluster", Constants.MIMETYPE_PROTOBUF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); StorageClusterStatusModel model = new StorageClusterStatusModel(); model.getObjectFromMessage(response.getBody()); validate(model); response = client.get("/status/cluster", Constants.MIMETYPE_PROTOBUF_IETF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type")); model = new StorageClusterStatusModel(); model.getObjectFromMessage(response.getBody()); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java index 3abcb4a010..25508b0fd1 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java @@ -127,13 +127,13 @@ public class TestTableResource { admin.split(TABLE); // give some time for the split to happen - long timeout = EnvironmentEdgeManager.currentTime() + (15 * 1000); - TEST_UTIL.waitFor(timeout, 250, new Waiter.Predicate() { + TEST_UTIL.waitFor(15 * 1000, 250, new Waiter.Predicate() { @Override public boolean evaluate() 
throws IOException { List regionLocations = regionLocator.getAllRegionLocations(); - return regionLocations.size() == 2 && regionLocations.get(0).getServerName() != null - && regionLocations.get(1).getServerName() != null; + return regionLocations != null && + regionLocations.size() == 2 && regionLocations.get(0).getServerName() != null && + regionLocations.get(1).getServerName() != null; } }); m = regionLocator.getAllRegionLocations(); @@ -199,14 +199,14 @@ public class TestTableResource { @Test public void testTableListText() throws IOException { Response response = client.get("/", Constants.MIMETYPE_TEXT); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type")); } @Test public void testTableListXML() throws IOException, JAXBException { Response response = client.get("/", Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); TableListModel model = (TableListModel) context.createUnmarshaller() @@ -217,20 +217,20 @@ public class TestTableResource { @Test public void testTableListJSON() throws IOException { Response response = client.get("/", Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); } @Test public void testTableListPB() throws IOException, JAXBException { Response response = client.get("/", Constants.MIMETYPE_PROTOBUF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); TableListModel model = new TableListModel(); model.getObjectFromMessage(response.getBody()); checkTableList(model); response = client.get("/", Constants.MIMETYPE_PROTOBUF_IETF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type")); model = new TableListModel(); model.getObjectFromMessage(response.getBody()); @@ -240,14 +240,14 @@ public class TestTableResource { @Test public void testTableInfoText() throws IOException { Response response = client.get("/" + TABLE + "/regions", Constants.MIMETYPE_TEXT); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type")); } @Test public void testTableInfoXML() throws IOException, JAXBException { Response response = client.get("/" + TABLE + "/regions", Constants.MIMETYPE_XML); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type")); TableInfoModel model = (TableInfoModel) context.createUnmarshaller() @@ -258,20 +258,20 @@ public class TestTableResource { @Test public void testTableInfoJSON() throws IOException { Response response = client.get("/" + TABLE + "/regions", Constants.MIMETYPE_JSON); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type")); } @Test public void testTableInfoPB() throws IOException, JAXBException { Response response = client.get("/" + TABLE + "/regions", Constants.MIMETYPE_PROTOBUF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); 
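Earlier in this TestTableResource hunk, the absolute deadline built from EnvironmentEdgeManager.currentTime() + (15 * 1000) is replaced by the plain 15-second duration passed to waitFor, which is what the change implies the utility expects: a timeout length, not a wall-clock instant. A self-contained sketch of the same pitfall using only java.time, with illustrative names (PollUntil, poll) that are not part of the patch or of HBase:

    import java.time.Duration;
    import java.time.Instant;
    import java.util.function.BooleanSupplier;

    public final class PollUntil {
      /**
       * Polls the condition until it holds or the timeout elapses. The caller hands
       * in a Duration; converting it to a deadline happens here, exactly once, so an
       * absolute timestamp is never mistaken for a timeout length.
       */
      public static boolean poll(Duration timeout, Duration interval, BooleanSupplier condition)
          throws InterruptedException {
        Instant deadline = Instant.now().plus(timeout);
        while (Instant.now().isBefore(deadline)) {
          if (condition.getAsBoolean()) {
            return true;
          }
          Thread.sleep(interval.toMillis());
        }
        return condition.getAsBoolean();
      }

      public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        boolean done = poll(Duration.ofSeconds(15), Duration.ofMillis(250),
            () -> System.currentTimeMillis() - start > 500);
        System.out.println("condition met: " + done);
      }
    }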
assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type")); TableInfoModel model = new TableInfoModel(); model.getObjectFromMessage(response.getBody()); checkTableInfo(model); response = client.get("/" + TABLE + "/regions", Constants.MIMETYPE_PROTOBUF_IETF); - assertEquals(response.getCode(), 200); + assertEquals(200, response.getCode()); assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type")); model = new TableInfoModel(); model.getObjectFromMessage(response.getBody()); @@ -279,4 +279,3 @@ public class TestTableResource { } } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java index 0654f78a81..d3e618bed6 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestTableScan.java @@ -28,6 +28,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -278,12 +279,14 @@ public class TestTableScan { // install the callback on all ClientSideCellSetModel instances unmarshaller.setListener(new Unmarshaller.Listener() { + @Override public void beforeUnmarshal(Object target, Object parent) { if (target instanceof ClientSideCellSetModel) { ((ClientSideCellSetModel) target).setCellSetModelListener(listener); } } + @Override public void afterUnmarshal(Object target, Object parent) { if (target instanceof ClientSideCellSetModel) { ((ClientSideCellSetModel) target).setCellSetModelListener(null); @@ -485,7 +488,8 @@ public class TestTableScan { CellSetModel model = (CellSetModel) ush.unmarshal(response.getStream()); int count = TestScannerResource.countCellSet(model); assertEquals(1, count); - assertEquals("aab", new String(model.getRows().get(0).getCells().get(0).getValue())); + assertEquals("aab", new String(model.getRows().get(0).getCells().get(0).getValue(), + StandardCharsets.UTF_8)); } @Test @@ -503,7 +507,8 @@ public class TestTableScan { CellSetModel model = (CellSetModel) ush.unmarshal(response.getStream()); int count = TestScannerResource.countCellSet(model); assertEquals(1, count); - assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue())); + assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue(), + StandardCharsets.UTF_8)); } @@ -522,7 +527,8 @@ public class TestTableScan { CellSetModel model = (CellSetModel) ush.unmarshal(response.getStream()); int count = TestScannerResource.countCellSet(model); assertEquals(1, count); - assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue())); + assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue(), + StandardCharsets.UTF_8)); } @Test @@ -541,7 +547,8 @@ public class TestTableScan { CellSetModel model = (CellSetModel) ush.unmarshal(response.getStream()); int count = TestScannerResource.countCellSet(model); assertEquals(1, count); - assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue())); + assertEquals("abc", new String(model.getRows().get(0).getCells().get(0).getValue(), + StandardCharsets.UTF_8)); } @Test @@ -606,10 +613,10 @@ public class TestTableScan { RowModel rowModel = rowModels.get(i); RowModel reversedRowModel = reversedRowModels.get(i); - assertEquals(new 
String(rowModel.getKey(), "UTF-8"), - new String(reversedRowModel.getKey(), "UTF-8")); - assertEquals(new String(rowModel.getCells().get(0).getValue(), "UTF-8"), - new String(reversedRowModel.getCells().get(0).getValue(), "UTF-8")); + assertEquals(new String(rowModel.getKey(), StandardCharsets.UTF_8), + new String(reversedRowModel.getKey(), StandardCharsets.UTF_8)); + assertEquals(new String(rowModel.getCells().get(0).getValue(), StandardCharsets.UTF_8), + new String(reversedRowModel.getCells().get(0).getValue(), StandardCharsets.UTF_8)); } } @@ -658,7 +665,7 @@ public class TestTableScan { public void setCellSetModelListener(final Listener l) { row = (l == null) ? null : new ArrayList() { private static final long serialVersionUID = 1L; - + @Override public boolean add(RowModel o) { l.handleRowModel(ClientSideCellSetModel.this, o); listenerInvoked = true; diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java index 60a434025b..03c462e6d1 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java @@ -78,7 +78,7 @@ public class TestVersionResource { private static void validate(VersionModel model) { assertNotNull(model); assertNotNull(model.getRESTVersion()); - assertEquals(model.getRESTVersion(), RESTServlet.VERSION_STRING); + assertEquals(RESTServlet.VERSION_STRING, model.getRESTVersion()); String osVersion = model.getOSVersion(); assertNotNull(osVersion); assertTrue(osVersion.contains(System.getProperty("os.name"))); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java index 586e33c183..c12bbb63bb 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestXmlParsing.java @@ -23,6 +23,8 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.io.IOException; +import java.nio.charset.StandardCharsets; + import javax.xml.bind.UnmarshalException; import org.apache.commons.logging.Log; @@ -48,7 +50,7 @@ public class TestXmlParsing { + ""; Client client = mock(Client.class); RemoteAdmin admin = new RemoteAdmin(client, HBaseConfiguration.create(), null); - Response resp = new Response(200, null, xml.getBytes()); + Response resp = new Response(200, null, xml.getBytes(StandardCharsets.UTF_8)); when(client.get("/version/cluster", Constants.MIMETYPE_XML)).thenReturn(resp); @@ -64,7 +66,7 @@ public class TestXmlParsing { + " &xee;"; Client client = mock(Client.class); RemoteAdmin admin = new RemoteAdmin(client, HBaseConfiguration.create(), null); - Response resp = new Response(200, null, externalEntitiesXml.getBytes()); + Response resp = new Response(200, null, externalEntitiesXml.getBytes(StandardCharsets.UTF_8)); when(client.get("/version/cluster", Constants.MIMETYPE_XML)).thenReturn(resp); diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellModel.java index bc273b46bc..a21a31659b 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellModel.java @@ -43,6 +43,7 @@ public class 
TestCellModel extends TestModelBase { "{\"column\":\"dGVzdGNvbHVtbg==\",\"timestamp\":1245219839331,\"$\":\"dGVzdHZhbHVl\"}"; } + @Override protected CellModel buildTestModel() { CellModel model = new CellModel(); model.setColumn(COLUMN); @@ -51,21 +52,25 @@ public class TestCellModel extends TestModelBase { return model; } + @Override protected void checkModel(CellModel model) { assertTrue(Bytes.equals(model.getColumn(), COLUMN)); assertTrue(Bytes.equals(model.getValue(), VALUE)); assertTrue(model.hasUserTimestamp()); - assertEquals(model.getTimestamp(), TIMESTAMP); + assertEquals(TIMESTAMP, model.getTimestamp()); } + @Override public void testBuildModel() throws Exception { checkModel(buildTestModel()); } + @Override public void testFromXML() throws Exception { checkModel(fromXML(AS_XML)); } + @Override public void testFromPB() throws Exception { checkModel(fromPB(AS_PB)); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellSetModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellSetModel.java index 08cd0e43a0..56c267967f 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellSetModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestCellSetModel.java @@ -80,6 +80,7 @@ public class TestCellSetModel extends TestModelBase { "\"timestamp\":1245393318192,\"$\":\"dGVzdHZhbHVlMw==\"}]}]}"; } + @Override protected CellSetModel buildTestModel() { CellSetModel model = new CellSetModel(); RowModel row; @@ -95,6 +96,7 @@ public class TestCellSetModel extends TestModelBase { return model; } + @Override protected void checkModel(CellSetModel model) { Iterator rows = model.getRows().iterator(); RowModel row = rows.next(); @@ -104,7 +106,7 @@ public class TestCellSetModel extends TestModelBase { assertTrue(Bytes.equals(COLUMN1, cell.getColumn())); assertTrue(Bytes.equals(VALUE1, cell.getValue())); assertTrue(cell.hasUserTimestamp()); - assertEquals(cell.getTimestamp(), TIMESTAMP1); + assertEquals(TIMESTAMP1, cell.getTimestamp()); assertFalse(cells.hasNext()); row = rows.next(); assertTrue(Bytes.equals(ROW2, row.getKey())); @@ -113,26 +115,28 @@ public class TestCellSetModel extends TestModelBase { assertTrue(Bytes.equals(COLUMN2, cell.getColumn())); assertTrue(Bytes.equals(VALUE2, cell.getValue())); assertTrue(cell.hasUserTimestamp()); - assertEquals(cell.getTimestamp(), TIMESTAMP2); + assertEquals(TIMESTAMP2, cell.getTimestamp()); cell = cells.next(); assertTrue(Bytes.equals(COLUMN3, cell.getColumn())); assertTrue(Bytes.equals(VALUE3, cell.getValue())); assertTrue(cell.hasUserTimestamp()); - assertEquals(cell.getTimestamp(), TIMESTAMP3); + assertEquals(TIMESTAMP3, cell.getTimestamp()); assertFalse(cells.hasNext()); } + @Override public void testBuildModel() throws Exception { checkModel(buildTestModel()); } + @Override public void testFromXML() throws Exception { checkModel(fromXML(AS_XML)); } + @Override public void testFromPB() throws Exception { checkModel(fromPB(AS_PB)); } } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java index bf1d2046ca..f8fa963793 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java @@ -47,6 +47,7 @@ public class TestColumnSchemaModel extends TestModelBase { 
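In the TestTableScan and TestXmlParsing diffs above, the String/byte conversions are pinned to StandardCharsets.UTF_8 instead of relying on the JVM default charset, which varies with platform and locale. A small sketch of the idea, with hypothetical values rather than data from the tests:

    import java.nio.charset.StandardCharsets;

    public class CharsetExample {
      public static void main(String[] args) {
        // Explicit charset: the byte layout no longer depends on the host's defaults.
        byte[] payload = "abc".getBytes(StandardCharsets.UTF_8);
        // new String(payload) alone would decode with the default charset and can
        // differ between machines; naming the charset keeps round-trips deterministic.
        String decoded = new String(payload, StandardCharsets.UTF_8);
        System.out.println(decoded.equals("abc")); // true everywhere
      }
    }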
"\"TTL\":\"86400\",\"IN_MEMORY\":\"false\"}"; } + @Override protected ColumnSchemaModel buildTestModel() { ColumnSchemaModel model = new ColumnSchemaModel(); model.setName(COLUMN_NAME); @@ -60,18 +61,19 @@ public class TestColumnSchemaModel extends TestModelBase { return model; } + @Override protected void checkModel(ColumnSchemaModel model) { - assertEquals(model.getName(), COLUMN_NAME); - assertEquals(model.__getBlockcache(), BLOCKCACHE); - assertEquals(model.__getBlocksize(), BLOCKSIZE); - assertEquals(model.__getBloomfilter(), BLOOMFILTER); + assertEquals(COLUMN_NAME, model.getName()); + assertEquals(BLOCKCACHE, model.__getBlockcache()); + assertEquals(BLOCKSIZE, model.__getBlocksize()); + assertEquals(BLOOMFILTER, model.__getBloomfilter()); assertTrue(model.__getCompression().equalsIgnoreCase(COMPRESSION)); - assertEquals(model.__getInMemory(), IN_MEMORY); - assertEquals(model.__getTTL(), TTL); - assertEquals(model.__getVersions(), VERSIONS); + assertEquals(IN_MEMORY, model.__getInMemory()); + assertEquals(TTL, model.__getTTL()); + assertEquals(VERSIONS, model.__getVersions()); } + @Override public void testFromPB() throws Exception { } } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java index 293110a63c..ad3a667a22 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestModelBase.java @@ -93,7 +93,7 @@ public abstract class TestModelBase extends TestCase { protected T fromPB(String pb) throws Exception { return (T)clazz.getMethod("getObjectFromMessage", byte[].class).invoke( - clazz.newInstance(), + clazz.getDeclaredConstructor().newInstance(), Base64.decode(AS_PB)); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.java index c1bf0c8291..1a0fd1b021 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestNamespacesInstanceModel.java @@ -53,6 +53,7 @@ public class TestNamespacesInstanceModel extends TestModelBase { AS_JSON = "{\"Namespace\":[\"testNamespace1\",\"testNamespace2\"]}"; } + @Override protected NamespacesModel buildTestModel() { return buildTestModel(NAMESPACE_NAME_1, NAMESPACE_NAME_2); } @@ -56,6 +57,7 @@ public class TestNamespacesModel extends TestModelBase { return model; } + @Override protected void checkModel(NamespacesModel model) { checkModel(model, NAMESPACE_NAME_1, NAMESPACE_NAME_2); } @@ -69,16 +71,19 @@ public class TestNamespacesModel extends TestModelBase { } @Test + @Override public void testBuildModel() throws Exception { checkModel(buildTestModel()); } @Test + @Override public void testFromXML() throws Exception { checkModel(fromXML(AS_XML)); } @Test + @Override public void testFromPB() throws Exception { checkModel(fromPB(AS_PB)); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestRowModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestRowModel.java index 98ccb662c2..4301f9e962 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestRowModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestRowModel.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.rest.model; 
import java.util.Iterator; -import javax.xml.bind.JAXBContext; - import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; @@ -36,8 +34,6 @@ public class TestRowModel extends TestModelBase { private static final byte[] VALUE1 = Bytes.toBytes("testvalue1"); private static final long TIMESTAMP1 = 1245219839331L; - private JAXBContext context; - public TestRowModel() throws Exception { super(RowModel.class); AS_XML = @@ -49,6 +45,7 @@ public class TestRowModel extends TestModelBase { "\"timestamp\":1245219839331,\"$\":\"dGVzdHZhbHVlMQ==\"}]}"; } + @Override protected RowModel buildTestModel() { RowModel model = new RowModel(); model.setKey(ROW1); @@ -56,6 +53,7 @@ public class TestRowModel extends TestModelBase { return model; } + @Override protected void checkModel(RowModel model) { assertTrue(Bytes.equals(ROW1, model.getKey())); Iterator cells = model.getCells().iterator(); @@ -63,7 +61,7 @@ public class TestRowModel extends TestModelBase { assertTrue(Bytes.equals(COLUMN1, cell.getColumn())); assertTrue(Bytes.equals(VALUE1, cell.getValue())); assertTrue(cell.hasUserTimestamp()); - assertEquals(cell.getTimestamp(), TIMESTAMP1); + assertEquals(TIMESTAMP1, cell.getTimestamp()); assertFalse(cells.hasNext()); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java index f05d79f99a..80f8764118 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java @@ -57,6 +57,7 @@ public class TestScannerModel extends TestModelBase { + "JDj/////B0joB1IHcHJpdmF0ZVIGcHVibGljWAA="; } + @Override protected ScannerModel buildTestModel() { ScannerModel model = new ScannerModel(); model.setStartRow(START_ROW); @@ -73,6 +74,7 @@ public class TestScannerModel extends TestModelBase { return model; } + @Override protected void checkModel(ScannerModel model) { assertTrue(Bytes.equals(model.getStartRow(), START_ROW)); assertTrue(Bytes.equals(model.getEndRow(), END_ROW)); @@ -86,11 +88,11 @@ public class TestScannerModel extends TestModelBase { } assertTrue(foundCol1); assertTrue(foundCol2); - assertEquals(model.getStartTime(), START_TIME); - assertEquals(model.getEndTime(), END_TIME); - assertEquals(model.getBatch(), BATCH); - assertEquals(model.getCaching(), CACHING); - assertEquals(model.getCacheBlocks(), CACHE_BLOCKS); + assertEquals(START_TIME, model.getStartTime()); + assertEquals(END_TIME, model.getEndTime()); + assertEquals(BATCH, model.getBatch()); + assertEquals(CACHING, model.getCaching()); + assertEquals(CACHE_BLOCKS, model.getCacheBlocks()); boolean foundLabel1 = false; boolean foundLabel2 = false; if (model.getLabels() != null && model.getLabels().size() > 0) { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java index 743709625f..78afd2f769 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java @@ -73,6 +73,7 @@ public class TestStorageClusterStatusModel extends TestModelBase nodes = model.getLiveNodes().iterator(); StorageClusterStatusModel.Node node = nodes.next(); - assertEquals(node.getName(), "test1"); - 
assertEquals(node.getStartCode(), 1245219839331L); - assertEquals(node.getHeapSizeMB(), 128); - assertEquals(node.getMaxHeapSizeMB(), 1024); + assertEquals("test1", node.getName()); + assertEquals(1245219839331L, node.getStartCode()); + assertEquals(128, node.getHeapSizeMB()); + assertEquals(1024, node.getMaxHeapSizeMB()); Iterator regions = node.getRegions().iterator(); StorageClusterStatusModel.Node.Region region = regions.next(); assertTrue(Bytes.toString(region.getName()).equals( "hbase:root,,0")); - assertEquals(region.getStores(), 1); - assertEquals(region.getStorefiles(), 1); - assertEquals(region.getStorefileSizeMB(), 0); - assertEquals(region.getMemstoreSizeMB(), 0); - assertEquals(region.getStorefileIndexSizeMB(), 0); - assertEquals(region.getReadRequestsCount(), 1); - assertEquals(region.getWriteRequestsCount(), 2); - assertEquals(region.getRootIndexSizeKB(), 1); - assertEquals(region.getTotalStaticIndexSizeKB(), 1); - assertEquals(region.getTotalStaticBloomSizeKB(), 1); - assertEquals(region.getTotalCompactingKVs(), 1); - assertEquals(region.getCurrentCompactedKVs(), 1); + assertEquals(1, region.getStores()); + assertEquals(1, region.getStorefiles()); + assertEquals(0, region.getStorefileSizeMB()); + assertEquals(0, region.getMemstoreSizeMB()); + assertEquals(0, region.getStorefileIndexSizeMB()); + assertEquals(1, region.getReadRequestsCount()); + assertEquals(2, region.getWriteRequestsCount()); + assertEquals(1, region.getRootIndexSizeKB()); + assertEquals(1, region.getTotalStaticIndexSizeKB()); + assertEquals(1, region.getTotalStaticBloomSizeKB()); + assertEquals(1, region.getTotalCompactingKVs()); + assertEquals(1, region.getCurrentCompactedKVs()); assertFalse(regions.hasNext()); node = nodes.next(); - assertEquals(node.getName(), "test2"); - assertEquals(node.getStartCode(), 1245239331198L); - assertEquals(node.getHeapSizeMB(), 512); - assertEquals(node.getMaxHeapSizeMB(), 1024); + assertEquals("test2", node.getName()); + assertEquals(1245239331198L, node.getStartCode()); + assertEquals(512, node.getHeapSizeMB()); + assertEquals(1024, node.getMaxHeapSizeMB()); regions = node.getRegions().iterator(); region = regions.next(); assertEquals(Bytes.toString(region.getName()), TableName.META_TABLE_NAME+",,1246000043724"); - assertEquals(region.getStores(), 1); - assertEquals(region.getStorefiles(), 1); - assertEquals(region.getStorefileSizeMB(), 0); - assertEquals(region.getMemstoreSizeMB(), 0); - assertEquals(region.getStorefileIndexSizeMB(), 0); - assertEquals(region.getReadRequestsCount(), 1); - assertEquals(region.getWriteRequestsCount(), 2); - assertEquals(region.getRootIndexSizeKB(), 1); - assertEquals(region.getTotalStaticIndexSizeKB(), 1); - assertEquals(region.getTotalStaticBloomSizeKB(), 1); - assertEquals(region.getTotalCompactingKVs(), 1); - assertEquals(region.getCurrentCompactedKVs(), 1); + assertEquals(1, region.getStores()); + assertEquals(1, region.getStorefiles()); + assertEquals(0, region.getStorefileSizeMB()); + assertEquals(0, region.getMemstoreSizeMB()); + assertEquals(0, region.getStorefileIndexSizeMB()); + assertEquals(1, region.getReadRequestsCount()); + assertEquals(2, region.getWriteRequestsCount()); + assertEquals(1, region.getRootIndexSizeKB()); + assertEquals(1, region.getTotalStaticIndexSizeKB()); + assertEquals(1, region.getTotalStaticBloomSizeKB()); + assertEquals(1, region.getTotalCompactingKVs()); + assertEquals(1, region.getCurrentCompactedKVs()); assertFalse(regions.hasNext()); assertFalse(nodes.hasNext()); } } - diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterVersionModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterVersionModel.java index e4685495ee..4fa25906b1 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterVersionModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterVersionModel.java @@ -35,14 +35,16 @@ public class TestStorageClusterVersionModel extends TestModelBase { "startKey\":\"YWJyYWNhZGJyYQ==\"}]}"; } + @Override protected TableInfoModel buildTestModel() { TableInfoModel model = new TableInfoModel(); model.setName(TABLE); @@ -61,25 +62,32 @@ public class TestTableInfoModel extends TestModelBase { return model; } + @Override protected void checkModel(TableInfoModel model) { - assertEquals(model.getName(), TABLE); + assertEquals(TABLE, model.getName()); Iterator regions = model.getRegions().iterator(); TableRegionModel region = regions.next(); assertTrue(Bytes.equals(region.getStartKey(), START_KEY)); assertTrue(Bytes.equals(region.getEndKey(), END_KEY)); - assertEquals(region.getId(), ID); - assertEquals(region.getLocation(), LOCATION); + assertEquals(ID, region.getId()); + assertEquals(LOCATION, region.getLocation()); assertFalse(regions.hasNext()); } + @Test + @Override public void testBuildModel() throws Exception { checkModel(buildTestModel()); } + @Test + @Override public void testFromXML() throws Exception { checkModel(fromXML(AS_XML)); } + @Test + @Override public void testFromPB() throws Exception { checkModel(fromPB(AS_PB)); } diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableListModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableListModel.java index ea5960ddc2..7cbf4d63aa 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableListModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableListModel.java @@ -43,6 +43,7 @@ public class TestTableListModel extends TestModelBase { "{\"table\":[{\"name\":\"table1\"},{\"name\":\"table2\"},{\"name\":\"table3\"}]}"; } + @Override protected TableListModel buildTestModel() { TableListModel model = new TableListModel(); model.add(new TableModel(TABLE1)); @@ -51,15 +52,15 @@ public class TestTableListModel extends TestModelBase { return model; } + @Override protected void checkModel(TableListModel model) { Iterator tables = model.getTables().iterator(); TableModel table = tables.next(); - assertEquals(table.getName(), TABLE1); + assertEquals(TABLE1, table.getName()); table = tables.next(); - assertEquals(table.getName(), TABLE2); + assertEquals(TABLE2, table.getName()); table = tables.next(); - assertEquals(table.getName(), TABLE3); + assertEquals(TABLE3, table.getName()); assertFalse(tables.hasNext()); } } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java index 5df67b03b6..d37676679e 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java @@ -48,20 +48,22 @@ public class TestTableRegionModel extends TestModelBase { "startKey\":\"YWJyYWNhZGJyYQ==\"}"; } + @Override protected TableRegionModel buildTestModel() { TableRegionModel model = new TableRegionModel(TABLE, ID, START_KEY, END_KEY, LOCATION); return 
model; } + @Override protected void checkModel(TableRegionModel model) { assertTrue(Bytes.equals(model.getStartKey(), START_KEY)); assertTrue(Bytes.equals(model.getEndKey(), END_KEY)); - assertEquals(model.getId(), ID); - assertEquals(model.getLocation(), LOCATION); - assertEquals(model.getName(), - TABLE + "," + Bytes.toString(START_KEY) + "," + Long.toString(ID) + - ".ad9860f031282c46ed431d7af8f94aca."); + assertEquals(ID, model.getId()); + assertEquals(LOCATION, model.getLocation()); + assertEquals(TABLE + "," + Bytes.toString(START_KEY) + "," + Long.toString(ID) + + ".ad9860f031282c46ed431d7af8f94aca.", + model.getName()); } public void testGetName() { diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java index baaaf8c24e..853b2a1fdd 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java @@ -21,8 +21,6 @@ package org.apache.hadoop.hbase.rest.model; import java.util.Iterator; -import javax.xml.bind.JAXBContext; - import org.apache.hadoop.hbase.testclassification.SmallTests; import org.junit.experimental.categories.Category; @@ -37,8 +35,6 @@ public class TestTableSchemaModel extends TestModelBase { TestColumnSchemaModel testColumnSchemaModel; - private JAXBContext context; - public TestTableSchemaModel() throws Exception { super(TableSchemaModel.class); testColumnSchemaModel = new TestColumnSchemaModel(); @@ -63,6 +59,7 @@ public class TestTableSchemaModel extends TestModelBase { "\"COMPRESSION\":\"GZ\",\"VERSIONS\":\"1\",\"TTL\":\"86400\",\"IN_MEMORY\":\"false\"}]}"; } + @Override protected TableSchemaModel buildTestModel() { return buildTestModel(TABLE_NAME); } @@ -77,15 +74,16 @@ public class TestTableSchemaModel extends TestModelBase { return model; } + @Override protected void checkModel(TableSchemaModel model) { checkModel(model, TABLE_NAME); } public void checkModel(TableSchemaModel model, String tableName) { - assertEquals(model.getName(), tableName); - assertEquals(model.__getIsMeta(), IS_META); - assertEquals(model.__getIsRoot(), IS_ROOT); - assertEquals(model.__getReadOnly(), READONLY); + assertEquals(tableName, model.getName()); + assertEquals(IS_META, model.__getIsMeta()); + assertEquals(IS_ROOT, model.__getIsRoot()); + assertEquals(READONLY, model.__getReadOnly()); Iterator families = model.getColumns().iterator(); assertTrue(families.hasNext()); ColumnSchemaModel family = families.next(); @@ -93,17 +91,19 @@ public class TestTableSchemaModel extends TestModelBase { assertFalse(families.hasNext()); } + @Override public void testBuildModel() throws Exception { checkModel(buildTestModel()); } + @Override public void testFromXML() throws Exception { checkModel(fromXML(AS_XML)); } + @Override public void testFromPB() throws Exception { checkModel(fromPB(AS_PB)); } } - diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java index 154afe1479..0a881d32be 100644 --- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java +++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java @@ -21,12 +21,7 @@ package org.apache.hadoop.hbase.rest.model; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assume; -import 
org.junit.Test; import org.junit.experimental.categories.Category; -import org.mockito.Mockito; - -import javax.servlet.ServletContext; @Category(SmallTests.class) public class TestVersionModel extends TestModelBase { @@ -55,6 +50,7 @@ public class TestVersionModel extends TestModelBase { "REST\":\"0.0.1\",\"Server\":\"6.1.14\"}"; } + @Override protected VersionModel buildTestModel() { VersionModel model = new VersionModel(); model.setRESTVersion(REST_VERSION); @@ -65,12 +61,13 @@ public class TestVersionModel extends TestModelBase { return model; } + @Override protected void checkModel(VersionModel model) { - assertEquals(model.getRESTVersion(), REST_VERSION); - assertEquals(model.getOSVersion(), OS_VERSION); - assertEquals(model.getJVMVersion(), JVM_VERSION); - assertEquals(model.getServerVersion(), JETTY_VERSION); - assertEquals(model.getJerseyVersion(), JERSEY_VERSION); + assertEquals(REST_VERSION, model.getRESTVersion()); + assertEquals(OS_VERSION, model.getOSVersion()); + assertEquals(JVM_VERSION, model.getJVMVersion()); + assertEquals(JETTY_VERSION, model.getServerVersion()); + assertEquals(JERSEY_VERSION, model.getJerseyVersion()); } } diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java index 125e08ee57..0003df0243 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminServer.java @@ -314,15 +314,15 @@ public class RSGroupAdminServer implements RSGroupAdmin { if (master.getMasterCoprocessorHost() != null) { master.getMasterCoprocessorHost().preRemoveRSGroup(name); } - RSGroupInfo RSGroupInfo = getRSGroupInfoManager().getRSGroup(name); - if(RSGroupInfo == null) { + RSGroupInfo groupInfo = getRSGroupInfoManager().getRSGroup(name); + if(groupInfo == null) { throw new ConstraintException("Group "+name+" does not exist"); } - int tableCount = RSGroupInfo.getTables().size(); + int tableCount = groupInfo.getTables().size(); if (tableCount > 0) { throw new ConstraintException("Group "+name+" must have no associated tables: "+tableCount); } - int serverCount = RSGroupInfo.getServers().size(); + int serverCount = groupInfo.getServers().size(); if(serverCount > 0) { throw new ConstraintException( "Group "+name+" must have no associated servers: "+serverCount); diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java index e1bcb25b07..049e723047 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java @@ -77,7 +77,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc private Configuration config; private ClusterStatus clusterStatus; private MasterServices masterServices; - private RSGroupInfoManager RSGroupInfoManager; + private RSGroupInfoManager infoManager; private LoadBalancer internalBalancer; //used during reflection by LoadBalancerFactory @@ -88,7 +88,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc //This constructor should only be used for unit testing @InterfaceAudience.Private public RSGroupBasedLoadBalancer(RSGroupInfoManager RSGroupInfoManager) { - this.RSGroupInfoManager = 
RSGroupInfoManager; + this.infoManager = RSGroupInfoManager; } @Override @@ -133,7 +133,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc regionPlans.add(new RegionPlan(regionInfo, null, null)); } try { - for (RSGroupInfo info : RSGroupInfoManager.listRSGroups()) { + for (RSGroupInfo info : infoManager.listRSGroups()) { Map> groupClusterState = new HashMap>(); for (Address addr : info.getServers()) { @@ -192,7 +192,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc Set misplacedRegions = getMisplacedRegions(regions); for (HRegionInfo region : regions.keySet()) { if (!misplacedRegions.contains(region)) { - String groupName = RSGroupInfoManager.getRSGroupOfTable(region.getTable()); + String groupName = infoManager.getRSGroupOfTable(region.getTable()); groupToRegion.put(groupName, region); } } @@ -201,7 +201,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc for (String key : groupToRegion.keySet()) { Map currentAssignmentMap = new TreeMap(); List regionList = groupToRegion.get(key); - RSGroupInfo info = RSGroupInfoManager.getRSGroup(key); + RSGroupInfo info = infoManager.getRSGroup(key); List candidateList = filterOfflineServers(info, servers); for (HRegionInfo region : regionList) { currentAssignmentMap.put(region, regions.get(region)); @@ -213,9 +213,9 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc } for (HRegionInfo region : misplacedRegions) { - String groupName = RSGroupInfoManager.getRSGroupOfTable( + String groupName = infoManager.getRSGroupOfTable( region.getTable()); - RSGroupInfo info = RSGroupInfoManager.getRSGroup(groupName); + RSGroupInfo info = infoManager.getRSGroup(groupName); List candidateList = filterOfflineServers(info, servers); ServerName server = this.internalBalancer.randomAssignment(region, candidateList); @@ -261,14 +261,14 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc ListMultimap serverMap) throws HBaseIOException { try { for (HRegionInfo region : regions) { - String groupName = RSGroupInfoManager.getRSGroupOfTable(region.getTable()); + String groupName = infoManager.getRSGroupOfTable(region.getTable()); if(groupName == null) { LOG.warn("Group for table "+region.getTable()+" is null"); } regionMap.put(groupName, region); } for (String groupKey : regionMap.keySet()) { - RSGroupInfo info = RSGroupInfoManager.getRSGroup(groupKey); + RSGroupInfo info = infoManager.getRSGroup(groupKey); serverMap.putAll(groupKey, filterOfflineServers(info, servers)); if(serverMap.get(groupKey).size() < 1) { serverMap.put(groupKey, LoadBalancer.BOGUS_SERVER_NAME); @@ -285,7 +285,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc return filterServers(RSGroupInfo.getServers(), onlineServers); } else { LOG.debug("Group Information found to be null. 
Some regions might be unassigned."); - return Collections.EMPTY_LIST; + return Collections.emptyList(); } } @@ -311,17 +311,6 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc return finalList; } - private ListMultimap groupRegions( - List regionList) throws IOException { - ListMultimap regionGroup = ArrayListMultimap - .create(); - for (HRegionInfo region : regionList) { - String groupName = RSGroupInfoManager.getRSGroupOfTable(region.getTable()); - regionGroup.put(groupName, region); - } - return regionGroup; - } - private Set getMisplacedRegions( Map regions) throws IOException { Set misplacedRegions = new HashSet(); @@ -329,13 +318,13 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc HRegionInfo regionInfo = region.getKey(); ServerName assignedServer = region.getValue(); RSGroupInfo info = - RSGroupInfoManager.getRSGroup(RSGroupInfoManager.getRSGroupOfTable(regionInfo.getTable())); + infoManager.getRSGroup(infoManager.getRSGroupOfTable(regionInfo.getTable())); if (assignedServer != null && (info == null || !info.containsServer(assignedServer.getAddress()))) { LOG.debug("Found misplaced region: " + regionInfo.getRegionNameAsString() + " on server: " + assignedServer + " found in group: " + - RSGroupInfoManager.getRSGroupOfServer(assignedServer.getAddress()) + + infoManager.getRSGroupOfServer(assignedServer.getAddress()) + " outside of group: " + (info == null ? "UNKNOWN" : info.getName())); misplacedRegions.add(regionInfo); } @@ -355,8 +344,8 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc for (HRegionInfo region : regions) { RSGroupInfo info = null; try { - info = RSGroupInfoManager.getRSGroup( - RSGroupInfoManager.getRSGroupOfTable(region.getTable())); + info = infoManager.getRSGroup( + infoManager.getRSGroupOfTable(region.getTable())); } catch (IOException exp) { LOG.debug("Group information null for region of table " + region.getTable(), exp); @@ -374,7 +363,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc @Override public void initialize() throws HBaseIOException { try { - if (RSGroupInfoManager == null) { + if (infoManager == null) { List cps = masterServices.getMasterCoprocessorHost().findCoprocessors(RSGroupAdminEndpoint.class); if (cps.size() != 1) { @@ -382,7 +371,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc LOG.error(msg); throw new HBaseIOException(msg); } - RSGroupInfoManager = cps.get(0).getGroupInfoManager(); + infoManager = cps.get(0).getGroupInfoManager(); } } catch (IOException e) { throw new HBaseIOException("Failed to initialize GroupInfoManagerImpl", e); @@ -400,7 +389,7 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalanc } public boolean isOnline() { - return RSGroupInfoManager != null && RSGroupInfoManager.isOnline(); + return infoManager != null && infoManager.isOnline(); } @Override diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java index 60afee0c14..e7b0e4a5b8 100644 --- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java +++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfo.java @@ -86,8 +86,8 @@ public class RSGroupInfo { * * @param servers the servers */ - public void addAllServers(Collection
servers){ - servers.addAll(servers); + public void addAllServers(Collection
addresses){ + this.servers.addAll(addresses); } /** diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java index dea0a94812..2360ce8091 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/master/balancer/TestRSGroupBasedLoadBalancer.java @@ -163,33 +163,6 @@ public class TestRSGroupBasedLoadBalancer { } /** - * All regions have an assignment. - * - * @param regions - * @param servers - * @param assignments - * @throws java.io.IOException - * @throws java.io.FileNotFoundException - */ - private void assertImmediateAssignment(List regions, - List servers, - Map assignments) - throws IOException { - for (HRegionInfo region : regions) { - assertTrue(assignments.containsKey(region)); - ServerName server = assignments.get(region); - TableName tableName = region.getTable(); - - String groupName = - getMockedGroupInfoManager().getRSGroupOfTable(tableName); - assertTrue(StringUtils.isNotEmpty(groupName)); - RSGroupInfo gInfo = getMockedGroupInfoManager().getRSGroup(groupName); - assertTrue("Region is not correctly assigned to group servers.", - gInfo.containsServer(server.getAddress())); - } - } - - /** * Tests the bulk assignment used during cluster startup. * * Round-robin. Should yield a balanced cluster so same invariant as the diff --git a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java index a23a430f57..4c538ec08a 100644 --- a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java +++ b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroupsBase.java @@ -37,12 +37,12 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.constraint.ConstraintException; +import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.hbase.net.Address; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; -import org.junit.Ignore; import org.junit.Test; import java.io.IOException; @@ -55,6 +55,7 @@ import java.util.Set; import java.util.TreeMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -121,12 +122,13 @@ public abstract class TestRSGroupsBase { } protected void deleteGroups() throws IOException { - RSGroupAdmin groupAdmin = new RSGroupAdminClient(TEST_UTIL.getConnection()); - for(RSGroupInfo group: groupAdmin.listRSGroups()) { - if(!group.getName().equals(RSGroupInfo.DEFAULT_GROUP)) { - groupAdmin.moveTables(group.getTables(), RSGroupInfo.DEFAULT_GROUP); - groupAdmin.moveServers(group.getServers(), RSGroupInfo.DEFAULT_GROUP); - groupAdmin.removeRSGroup(group.getName()); + try (RSGroupAdmin groupAdmin = new RSGroupAdminClient(TEST_UTIL.getConnection())) { + for(RSGroupInfo group: groupAdmin.listRSGroups()) { + if(!group.getName().equals(RSGroupInfo.DEFAULT_GROUP)) { + groupAdmin.moveTables(group.getTables(), 
RSGroupInfo.DEFAULT_GROUP); + groupAdmin.moveServers(group.getServers(), RSGroupInfo.DEFAULT_GROUP); + groupAdmin.removeRSGroup(group.getName()); + } } } } @@ -488,6 +490,7 @@ public abstract class TestRSGroupsBase { break; } } + assertNotNull(targetServer); final AdminProtos.AdminService.BlockingInterface targetRS = admin.getConnection().getAdmin(targetServer); @@ -508,10 +511,10 @@ public abstract class TestRSGroupsBase { TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate() { @Override public boolean evaluate() throws Exception { - return - getTableRegionMap().get(tableName) != null && - getTableRegionMap().get(tableName).size() == 6 && - admin.getClusterStatus().getRegionsInTransition().size() < 1; + List regions = getTableRegionMap().get(tableName); + Set regionsInTransition = admin.getClusterStatus().getRegionsInTransition(); + return (regions != null && getTableRegionMap().get(tableName).size() == 6) && + ( regionsInTransition == null || regionsInTransition.size() < 1); } }); @@ -583,7 +586,6 @@ public abstract class TestRSGroupsBase { appInfo.getServers().iterator().next().toString()); AdminProtos.AdminService.BlockingInterface targetRS = admin.getConnection().getAdmin(targetServer); - HRegionInfo targetRegion = ProtobufUtil.getOnlineRegions(targetRS).get(0); Assert.assertEquals(1, ProtobufUtil.getOnlineRegions(targetRS).size()); try { @@ -779,10 +781,12 @@ public abstract class TestRSGroupsBase { TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate() { @Override public boolean evaluate() throws Exception { - return getTableRegionMap().get(tableName) != null && - getTableRegionMap().get(tableName).size() == 5 && - getTableServerRegionMap().get(tableName).size() == 1 && - admin.getClusterStatus().getRegionsInTransition().size() < 1; + List regions = getTableRegionMap().get(tableName); + Map> serverMap = getTableServerRegionMap().get(tableName); + Set regionsInTransition = admin.getClusterStatus().getRegionsInTransition(); + return (regions != null && regions.size() == 5) && + (serverMap != null && serverMap.size() == 1) && + (regionsInTransition == null || regionsInTransition.size() < 1); } }); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java index 1701588a42..1e9bc8b76c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/conf/ConfigurationManager.java @@ -79,7 +79,7 @@ public class ConfigurationManager { // notified when the configuration is reloaded from disk. This is a set // constructed from a WeakHashMap, whose entries would be removed if the // observer classes go out of scope. 
- private Set configurationObservers = + private final Set configurationObservers = Collections.newSetFromMap(new WeakHashMap()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java index f2b201b42c..275218f39d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java @@ -263,11 +263,9 @@ public abstract class CoprocessorHost { Coprocessor impl; Object o = null; try { - o = implClass.newInstance(); + o = implClass.getDeclaredConstructor().newInstance(); impl = (Coprocessor)o; - } catch (InstantiationException e) { - throw new IOException(e); - } catch (IllegalAccessException e) { + } catch (Exception e) { throw new IOException(e); } // create the environment @@ -402,7 +400,7 @@ public abstract class CoprocessorHost { /** Current coprocessor state */ Coprocessor.State state = Coprocessor.State.UNINSTALLED; /** Accounting for tables opened by the coprocessor */ - protected List openTables = + protected final List openTables = Collections.synchronizedList(new ArrayList()); private int seq; private Configuration conf; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java index daa55808be..754ea651ee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java @@ -295,6 +295,7 @@ public class HFileSystem extends FilterFileSystem { (cp.getClass().getClassLoader(), new Class[]{ClientProtocol.class, Closeable.class}, new InvocationHandler() { + @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { try { @@ -354,6 +355,7 @@ public class HFileSystem extends FilterFileSystem { * datanode is actually dead, so if we use it it will timeout. */ static class ReorderWALBlocks implements ReorderBlocks { + @Override public void reorderBlocks(Configuration conf, LocatedBlocks lbs, String src) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java index 2d78a3ff1f..8b1e375cb6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/http/SslSocketConnectorSecure.java @@ -41,6 +41,7 @@ public class SslSocketConnectorSecure extends SslSocketConnector { * Create a new ServerSocket that will not accept SSLv3 connections, * but will accept TLSv1.x connections. 
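Editorial aside: a minimal, self-contained sketch of the weak-observer idea behind the ConfigurationManager hunk above. The class and the use of Runnable as the observer type are illustrative stand-ins (the real code uses ConfigurationObserver); the point is that a set view over a WeakHashMap lets registered observers be garbage-collected once nothing else references them.

import java.util.Collections;
import java.util.Set;
import java.util.WeakHashMap;

class WeakObserverRegistry {
  // Final reference to a set backed by a WeakHashMap: an entry disappears
  // automatically when the observer becomes otherwise unreachable, so the
  // registry never pins observers in memory.
  private final Set<Runnable> observers =
      Collections.newSetFromMap(new WeakHashMap<Runnable, Boolean>());

  synchronized void register(Runnable observer) {
    observers.add(observer);
  }

  synchronized void notifyObservers() {
    // Entries cleared by the garbage collector simply no longer show up here.
    for (Runnable observer : observers) {
      observer.run();   // e.g. re-read the configuration
    }
  }
}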
*/ + @Override protected ServerSocket newServerSocket(String host, int port,int backlog) throws IOException { SSLServerSocket socket = (SSLServerSocket) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java index ad749f3b9b..6ad5cedf30 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FSDataInputStreamWrapper.java @@ -67,7 +67,7 @@ public class FSDataInputStreamWrapper { */ private volatile FSDataInputStream stream = null; private volatile FSDataInputStream streamNoFsChecksum = null; - private Object streamNoFsChecksumFirstCreateLock = new Object(); + private final Object streamNoFsChecksumFirstCreateLock = new Object(); // The configuration states that we should validate hbase checksums private boolean useHBaseChecksumConfigured; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java index a38e3c1794..a77708567a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/Reference.java @@ -222,6 +222,7 @@ public class Reference { return Arrays.hashCode(splitkey) + region.hashCode(); } + @Override public boolean equals(Object o) { if (this == o) return true; if (o == null) return false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java index 503942731d..6db081762b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java @@ -228,6 +228,7 @@ public abstract class AbstractHFileReader * @return the total heap size of data and meta block indexes in bytes. Does * not take into account non-root blocks of a multilevel data index. */ + @Override public long indexSize() { return (dataBlockIndexReader != null ? dataBlockIndexReader.heapSize() : 0) + ((metaBlockIndexReader != null) ? metaBlockIndexReader.heapSize() @@ -259,6 +260,7 @@ public abstract class AbstractHFileReader this.isPrimaryReplicaReader = isPrimaryReplicaReader; } + @Override public FileInfo loadFileInfo() throws IOException { return fileInfo; } @@ -345,6 +347,7 @@ public abstract class AbstractHFileReader /** For testing */ abstract HFileBlock.FSReader getUncachedBlockReader(); + @Override public Path getPath() { return path; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java index 2c2b039d11..69206935f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java @@ -129,6 +129,7 @@ public interface BlockCache extends Iterable { /** * @return Iterator over the blocks in the cache. 
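Editorial aside: the recurring replacement of Class.newInstance() with getDeclaredConstructor().newInstance() (CoprocessorHost above, FixedFileTrailer below, and several later hunks) is not purely cosmetic. Class.newInstance() is deprecated in newer JDKs because it rethrows whatever the no-arg constructor throws, including checked exceptions, without wrapping; the Constructor path reports failures as InvocationTargetException and related reflective exceptions that the caller must handle. A hedged sketch of the pattern with an illustrative helper name:

import java.io.IOException;

final class ReflectiveFactory {
  // Instantiate a class via its no-arg constructor, wrapping every reflective
  // failure (missing constructor, inaccessible constructor, constructor threw)
  // in an IOException, mirroring the catch blocks added in the diff.
  static <T> T create(Class<T> clazz) throws IOException {
    try {
      return clazz.getDeclaredConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
      throw new IOException("Cannot instantiate " + clazz.getName(), e);
    }
  }

  public static void main(String[] args) throws IOException {
    StringBuilder sb = ReflectiveFactory.create(StringBuilder.class);
    System.out.println(sb.append("created reflectively"));
  }
}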
*/ + @Override Iterator iterator(); /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java index 2a3a47dffa..09a662ad94 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java @@ -539,7 +539,7 @@ public class FixedFileTrailer { public void setComparatorClass(Class klass) { // Is the comparator instantiable? try { - KVComparator comp = klass.newInstance(); + KVComparator comp = klass.getDeclaredConstructor().newInstance(); // HFile V2 legacy comparator class names. if (KeyValue.COMPARATOR.getClass().equals(klass)) { @@ -592,11 +592,8 @@ public class FixedFileTrailer { public static KVComparator createComparator( String comparatorClassName) throws IOException { try { - return getComparatorClass(comparatorClassName).newInstance(); - } catch (InstantiationException e) { - throw new IOException("Comparator class " + comparatorClassName + - " is not instantiable", e); - } catch (IllegalAccessException e) { + return getComparatorClass(comparatorClassName).getDeclaredConstructor().newInstance(); + } catch (Exception e) { throw new IOException("Comparator class " + comparatorClassName + " is not instantiable", e); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java index 1f96b259e6..1600112add 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java @@ -674,82 +674,102 @@ public class HFile { return this; } + @Override public void clear() { this.map.clear(); } + @Override public Comparator comparator() { return map.comparator(); } + @Override public boolean containsKey(Object key) { return map.containsKey(key); } + @Override public boolean containsValue(Object value) { return map.containsValue(value); } + @Override public Set> entrySet() { return map.entrySet(); } + @Override public boolean equals(Object o) { return map.equals(o); } + @Override public byte[] firstKey() { return map.firstKey(); } + @Override public byte[] get(Object key) { return map.get(key); } + @Override public int hashCode() { return map.hashCode(); } + @Override public SortedMap headMap(byte[] toKey) { return this.map.headMap(toKey); } + @Override public boolean isEmpty() { return map.isEmpty(); } + @Override public Set keySet() { return map.keySet(); } + @Override public byte[] lastKey() { return map.lastKey(); } + @Override public byte[] put(byte[] key, byte[] value) { return this.map.put(key, value); } + @Override public void putAll(Map m) { this.map.putAll(m); } + @Override public byte[] remove(Object key) { return this.map.remove(key); } + @Override public int size() { return map.size(); } + @Override public SortedMap subMap(byte[] fromKey, byte[] toKey) { return this.map.subMap(fromKey, toKey); } + @Override public SortedMap tailMap(byte[] fromKey) { return this.map.tailMap(fromKey); } + @Override public Collection values() { return map.values(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index d98a7b5529..b46a58644e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -224,6 +224,7 @@ public class HFileBlock implements Cacheable { */ static final CacheableDeserializer BLOCK_DESERIALIZER = new CacheableDeserializer() { + @Override public HFileBlock deserialize(ByteBuffer buf, boolean reuse) throws IOException{ // The buf has the file block followed by block metadata. // Set limit to just before the BLOCK_METADATA_SPACE then rewind. @@ -411,6 +412,7 @@ public class HFileBlock implements Cacheable { return nextBlockOnDiskSize; } + @Override public BlockType getBlockType() { return blockType; } @@ -1931,6 +1933,22 @@ public class HFileBlock implements Cacheable { } @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((blockType == null) ? 0 : blockType.hashCode()); + result = prime * result + ((buf == null) ? 0 : buf.hashCode()); + result = prime * result + ((fileContext == null) ? 0 : fileContext.hashCode()); + result = prime * result + nextBlockOnDiskSize; + result = prime * result + (int) (offset ^ (offset >>> 32)); + result = prime * result + onDiskDataSizeWithHeader; + result = prime * result + onDiskSizeWithoutHeader; + result = prime * result + (int) (prevBlockOffset ^ (prevBlockOffset >>> 32)); + result = prime * result + uncompressedSizeWithoutHeader; + return result; + } + + @Override public boolean equals(Object comparison) { if (this == comparison) { return true; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java index 086395ca6b..40ad1083d0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV3.java @@ -91,6 +91,7 @@ public class HFileWriterV3 extends HFileWriterV2 { } } + @Override protected void finishFileInfo() throws IOException { super.finishFileInfo(); if (hFileContext.getDataBlockEncoding() == DataBlockEncoding.PREFIX_TREE) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index 69fea76d14..1d17a7d5f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -439,6 +439,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { * @param cacheKey block's cache key * @param buf block buffer */ + @Override public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) { cacheBlock(cacheKey, buf, false, false); } @@ -488,7 +489,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { // Promote this to L1. if (result != null && caching) { - cacheBlock(cacheKey, result, /* inMemory = */ false, /* cacheData = */ true); + cacheBlock(cacheKey, result, false, true); } return result; } @@ -790,6 +791,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { return totalSize; } + @Override public int compareTo(BlockBucket that) { return Long.compare(this.overflow(), that.overflow()); } @@ -963,6 +965,7 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { *
<p> *
Includes: total accesses, hits, misses, evicted blocks, and runs * of the eviction processes. */ + @Override public CacheStats getStats() { return this.stats; } @@ -1078,19 +1081,24 @@ public class LruBlockCache implements ResizableBlockCache, HeapSize { long acceptableSize() { return (long)Math.floor(this.maxSize * this.acceptableFactor); } + private long minSize() { return (long)Math.floor(this.maxSize * this.minFactor); } + private long singleSize() { return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor); } + private long multiSize() { return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor); } + private long memorySize() { return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor); } + @Override public void shutdown() { if (victimHandler != null) victimHandler.shutdown(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java index 0828563e9c..2af6bdde11 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java @@ -90,6 +90,7 @@ public class LruCachedBlock implements HeapSize, Comparable { return this.cachedTime; } + @Override public long heapSize() { return size; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java index a6c84d0a49..ac7c4e7163 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java @@ -315,7 +315,7 @@ public final class BucketAllocator { this.bucketSizes = bucketSizes == null ? DEFAULT_BUCKET_SIZES : bucketSizes; Arrays.sort(this.bucketSizes); this.bigItemSize = Ints.max(this.bucketSizes); - this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * bigItemSize; + this.bucketCapacity = (long) FEWEST_ITEMS_IN_BUCKET * bigItemSize; buckets = new Bucket[(int) (availableSpace / bucketCapacity)]; if (buckets.length < this.bucketSizes.length) throw new BucketAllocatorException("Bucket allocator size too small (" + buckets.length + @@ -413,6 +413,7 @@ public final class BucketAllocator { } } + @Override public String toString() { StringBuilder sb = new StringBuilder(1024); for (int i = 0; i < buckets.length; ++i) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java index b8d7453af7..4ff73b49a0 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java @@ -840,6 +840,7 @@ public class BucketCache implements BlockCache, HeapSize { this.writerEnabled = false; } + @Override public void run() { List entries = new ArrayList(); try { @@ -1324,8 +1325,49 @@ public class BucketCache implements BlockCache, HeapSize { } @Override - public boolean equals(Object that) { - return this == that; + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + getOuterType().hashCode(); + result = prime * result + (int) (bucketSize ^ (bucketSize >>> 32)); + result = prime * result + ((queue == null) ? 
0 : queue.hashCode()); + result = prime * result + (int) (totalSize ^ (totalSize >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + BucketEntryGroup other = (BucketEntryGroup) obj; + if (!getOuterType().equals(other.getOuterType())) { + return false; + } + if (bucketSize != other.bucketSize) { + return false; + } + if (queue == null) { + if (other.queue != null) { + return false; + } + } else if (!queue.equals(other.queue)) { + return false; + } + if (totalSize != other.totalSize) { + return false; + } + return true; + } + + private BucketCache getOuterType() { + return BucketCache.this; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java index 0e33a569f5..46147a5ef1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/CachedEntryQueue.java @@ -58,7 +58,7 @@ public class CachedEntryQueue { initialSize++; } queue = MinMaxPriorityQueue.orderedBy(new Comparator>() { - + @Override public int compare(Entry entry1, Entry entry2) { return BucketEntry.COMPARATOR.compare(entry1.getValue(), entry2.getValue()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java index 1a8fa5b010..d1c43d546c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/BufferChain.java @@ -105,7 +105,7 @@ class BufferChain { try { long ret = channel.write(buffers, bufferOffset, bufCount); if (ret > 0) { - remaining -= ret; + remaining = (int) (remaining - ret); } return ret; } finally { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 5617acb703..3000622014 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -54,7 +54,6 @@ import java.nio.channels.SocketChannel; import java.nio.channels.WritableByteChannel; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; @@ -768,8 +767,8 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { int start = 0; int end = numConnections - 1; if (!force) { - start = rand.nextInt() % numConnections; - end = rand.nextInt() % numConnections; + start = rand.nextInt(numConnections); + end = rand.nextInt(numConnections); int temp; if (end < start) { temp = start; @@ -1859,14 +1858,15 @@ public class RpcServer implements RpcServerInterface, ConfigurationObserver { String className = header.getCellBlockCodecClass(); if (className == null || className.length() == 0) return; try { - this.codec = (Codec)Class.forName(className).newInstance(); + this.codec = (Codec)Class.forName(className).getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new UnsupportedCellCodecException(className, e); } if (!header.hasCellBlockCompressorClass()) return; className = header.getCellBlockCompressorClass(); 
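Editorial aside: the RpcServer hunk above swaps rand.nextInt() % numConnections for rand.nextInt(numConnections). The modulo form can produce a negative index, because nextInt() with no argument ranges over the whole int space, while nextInt(bound) is guaranteed to be uniform in [0, bound). A small stand-alone demonstration, independent of the HBase classes:

import java.util.Random;

class BoundedRandomDemo {
  public static void main(String[] args) {
    Random rand = new Random(42);
    int numConnections = 10;

    // Buggy form: the remainder can be negative and would index
    // outside a connection list.
    int risky = rand.nextInt() % numConnections;

    // Fixed form: always in [0, numConnections).
    int safe = rand.nextInt(numConnections);

    System.out.println("risky = " + risky + ", safe = " + safe);
  }
}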
try { - this.compressionCodec = (CompressionCodec)Class.forName(className).newInstance(); + this.compressionCodec = (CompressionCodec) + Class.forName(className).getDeclaredConstructor().newInstance(); } catch (Exception e) { throw new UnsupportedCompressionCodecException(className, e); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java index ee6da759af..2ae4996083 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java @@ -94,6 +94,7 @@ implements TableMap { * @param reporter * @throws IOException */ + @Override public void map(ImmutableBytesWritable key, Result value, OutputCollector output, Reporter reporter) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java index a7d23d49d9..fd747bafba 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java @@ -49,6 +49,7 @@ public class TableInputFormat extends TableInputFormatBase implements */ public static final String COLUMN_LIST = "hbase.mapred.tablecolumns"; + @Override public void configure(JobConf job) { try { initialize(job); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java index 874e593305..bc82f71b1d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java @@ -102,6 +102,7 @@ implements InputFormat { * @see org.apache.hadoop.mapred.InputFormat#getRecordReader(InputSplit, * JobConf, Reporter) */ + @Override public RecordReader getRecordReader( InputSplit split, JobConf job, Reporter reporter) throws IOException { @@ -182,6 +183,7 @@ implements InputFormat { * * @see org.apache.hadoop.mapred.InputFormat#getSplits(org.apache.hadoop.mapred.JobConf, int) */ + @Override public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException { if (this.table == null) { initialize(job); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java index 281d13eef2..bd914df16c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java @@ -94,6 +94,7 @@ implements RecordReader { this.recordReaderImpl.setRowFilter(rowFilter); } + @Override public void close() { this.recordReaderImpl.close(); } @@ -103,6 +104,7 @@ implements RecordReader { * * @see org.apache.hadoop.mapred.RecordReader#createKey() */ + @Override public ImmutableBytesWritable createKey() { return this.recordReaderImpl.createKey(); } @@ -112,10 +114,12 @@ implements RecordReader { * * @see org.apache.hadoop.mapred.RecordReader#createValue() */ + @Override public Result createValue() { return this.recordReaderImpl.createValue(); } + @Override public long getPos() { // This should be the ordinal tuple in the range; @@ -123,6 +127,7 @@ implements RecordReader { return 
this.recordReaderImpl.getPos(); } + @Override public float getProgress() { // Depends on the total number of tuples and getPos return this.recordReaderImpl.getPos(); @@ -134,6 +139,7 @@ implements RecordReader { * @return true if there was more data * @throws IOException */ + @Override public boolean next(ImmutableBytesWritable key, Result value) throws IOException { return this.recordReaderImpl.next(key, value); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java index 09290fd947..314e10b2da 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java @@ -126,12 +126,13 @@ public class CellCounter { */ @Override + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public void map(ImmutableBytesWritable row, Result values, Context context) throws IOException { Preconditions.checkState(values != null, "values passed to the map is null"); - try { byte[] currentRow = values.getRow(); if (lastRow == null || !Bytes.equals(lastRow, currentRow)) { @@ -179,10 +180,10 @@ public class CellCounter { static class IntSumReducer extends Reducer { - private IntWritable result = new IntWritable(); - public void reduce(Key key, Iterable values, - Context context) + + @Override + public void reduce(Key key, Iterable values, Context context) throws IOException, InterruptedException { int sum = 0; for (IntWritable val : values) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java index 76746d126d..fc4aee31a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java @@ -22,6 +22,7 @@ import java.io.UnsupportedEncodingException; import java.net.InetSocketAddress; import java.net.URLDecoder; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -394,7 +395,7 @@ public class HFileOutputFormat2 new TreeSet(startKeys); ImmutableBytesWritable first = sorted.first(); - if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) { + if (!Bytes.equals(first.get(), HConstants.EMPTY_BYTE_ARRAY)) { throw new IllegalArgumentException( "First region of table should have empty start key. 
Instead has: " + Bytes.toStringBinary(first.get())); @@ -646,7 +647,8 @@ public class HFileOutputFormat2 continue; } try { - confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(), + confValMap.put(URLDecoder.decode(familySplit[0], "UTF-8") + .getBytes(StandardCharsets.UTF_8), URLDecoder.decode(familySplit[1], "UTF-8")); } catch (UnsupportedEncodingException e) { // will not happen with UTF-8 encoding diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java index fdf351ec6c..7cfe27d54f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java @@ -61,6 +61,7 @@ extends TableMapper { * @throws IOException When writing the record fails. * @throws InterruptedException When the job is aborted. */ + @Override public void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException { context.write(key, value); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java index d1dba1d4d0..ca5ef01de8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java @@ -166,7 +166,7 @@ public class MultithreadedTableMapper extends TableMapper { @Override public boolean nextKeyValue() throws IOException, InterruptedException { - synchronized (outer) { + synchronized (this) { if (!outer.nextKeyValue()) { return false; } @@ -177,6 +177,7 @@ public class MultithreadedTableMapper extends TableMapper { } } + @Override public ImmutableBytesWritable getCurrentKey() { return key; } @@ -197,7 +198,7 @@ public class MultithreadedTableMapper extends TableMapper { @Override public void write(K2 key, V2 value) throws IOException, InterruptedException { - synchronized (outer) { + synchronized (this) { outer.write(key, value); } } @@ -225,6 +226,7 @@ public class MultithreadedTableMapper extends TableMapper { outer.setStatus(status); } + @Override public float getProgress() { return 0; } @@ -281,7 +283,9 @@ public class MultithreadedTableMapper extends TableMapper { outer.getInputSplit()); Class wrappedMapperClass = Class.forName("org.apache.hadoop.mapreduce.lib.map.WrappedMapper"); Method getMapContext = wrappedMapperClass.getMethod("getMapContext", MapContext.class); - subcontext = (Context) getMapContext.invoke(wrappedMapperClass.newInstance(), mc); + subcontext = (Context) getMapContext.invoke( + wrappedMapperClass.getDeclaredConstructor().newInstance(), + mc); } catch (Exception ee) { // FindBugs: REC_CATCH_EXCEPTION // rethrow as IOE throw new IOException(e); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java index efb1cfd610..7b93e8f0e1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java @@ -49,6 +49,8 @@ import org.apache.zookeeper.KeeperException; * the active master of the cluster. 
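Editorial aside: the HFileOutputFormat2 fix above replaces first.equals(HConstants.EMPTY_BYTE_ARRAY) with Bytes.equals(first.get(), HConstants.EMPTY_BYTE_ARRAY). The original compared an ImmutableBytesWritable against a byte[], and equals() between unrelated types always returns false, so the empty-start-key check could never trigger. The pitfall can be shown without the HBase types (BytesHolder below is an illustrative stand-in for ImmutableBytesWritable):

import java.util.Arrays;

class EqualsTypeMismatchDemo {
  // A tiny stand-in for ImmutableBytesWritable: wraps a byte[].
  static final class BytesHolder {
    private final byte[] bytes;
    BytesHolder(byte[] bytes) { this.bytes = bytes; }
    byte[] get() { return bytes; }
    @Override public boolean equals(Object o) {
      return o instanceof BytesHolder && Arrays.equals(bytes, ((BytesHolder) o).bytes);
    }
    @Override public int hashCode() { return Arrays.hashCode(bytes); }
  }

  public static void main(String[] args) {
    byte[] empty = new byte[0];
    BytesHolder first = new BytesHolder(empty);

    // Wrong: compares a BytesHolder to a byte[], always false.
    System.out.println(first.equals(empty));                // false

    // Right: compare the wrapped bytes to the raw bytes.
    System.out.println(Arrays.equals(first.get(), empty));  // true
  }
}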
*/ @InterfaceAudience.Private +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Use of an atomic type both as monitor and condition variable is intended") public class ActiveMasterManager extends ZooKeeperListener { private static final Log LOG = LogFactory.getLog(ActiveMasterManager.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java index 809b980dc3..b8c088a583 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java @@ -714,6 +714,7 @@ public class AssignmentManager extends ZooKeeperListener { private void assignRegionsOnSSHCompletion() { LOG.info("Meta is rebuild by OfflineMetaRepair tool, assigning all user regions."); Thread regionAssignerThread = new Thread("RegionAssignerOnMetaRebuild") { + @Override public void run() { // Wait until all dead server processing finish while (serverManager.areDeadServersInProgress()) { @@ -1975,7 +1976,7 @@ public class AssignmentManager extends ZooKeeperListener { || t instanceof ServerNotRunningYetException) { // RS is aborting or stopping, we cannot offline the region since the region may need // to do WAL recovery. Until we see the RS expiration, we should retry. - sleepTime = 1 + conf.getInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, + sleepTime = 1L + conf.getInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, RpcClient.FAILED_SERVER_EXPIRY_DEFAULT); } else if (t instanceof NotServingRegionException) { @@ -1990,8 +1991,8 @@ public class AssignmentManager extends ZooKeeperListener { return; } else if ((t instanceof FailedServerException) || (state != null && t instanceof RegionAlreadyInTransitionException)) { - if(t instanceof FailedServerException) { - sleepTime = 1 + conf.getInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, + if (t instanceof FailedServerException) { + sleepTime = 1L + conf.getInt(RpcClient.FAILED_SERVER_EXPIRY_KEY, RpcClient.FAILED_SERVER_EXPIRY_DEFAULT); } else { // RS is already processing this region, only need to update the timestamp @@ -2359,7 +2360,8 @@ public class AssignmentManager extends ZooKeeperListener { return; } - if (plan != newPlan && !plan.getDestination().equals(newPlan.getDestination())) { + if (!plan.equals(newPlan) && + !plan.getDestination().equals(newPlan.getDestination())) { // Clean out plan we failed execute and one that doesn't look like it'll // succeed anyways; we need a new plan! 
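Editorial aside: several hunks in this patch follow the same overflow rule, for example the 1L + conf.getInt(...) sleep times just above, the (long) FEWEST_ITEMS_IN_BUCKET * bigItemSize bucket capacity earlier, and the RandomUtils delay cast later. Promoting one operand to long matters because int + int and int * int are evaluated in 32 bits and only then widened, so by the time the result is assigned to a long variable the overflow has already happened. A toy illustration with illustrative values:

class IntOverflowDemo {
  public static void main(String[] args) {
    int itemsPerBucket = 4;
    int bigItemSize = 1_000_000_000;

    // Overflows: 4 * 1_000_000_000 is computed as an int first, then widened.
    long wrong = itemsPerBucket * bigItemSize;

    // Correct: widen one operand so the multiplication is done in 64 bits.
    long right = (long) itemsPerBucket * bigItemSize;

    System.out.println("wrong = " + wrong);   // -294967296
    System.out.println("right = " + right);   // 4000000000
  }
}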
// Transition back to OFFLINE diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java index 1ea57b4830..9b60c003a2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentVerificationReport.java @@ -242,7 +242,7 @@ public class AssignmentVerificationReport { this.maxDispersionScoreServerSet.clear(); this.maxDispersionScoreServerSet.add(primaryRS); this.maxDispersionScore = dispersionScore; - } else if (dispersionScore == this.maxDispersionScore) { + } else if (Math.abs(dispersionScore - this.maxDispersionScore) < 0.0000001) { this.maxDispersionScoreServerSet.add(primaryRS); } @@ -260,7 +260,7 @@ public class AssignmentVerificationReport { this.minDispersionScoreServerSet.clear(); this.minDispersionScoreServerSet.add(primaryRS); this.minDispersionScore = dispersionScore; - } else if (dispersionScore == this.minDispersionScore) { + } else if (Math.abs(dispersionScore - this.minDispersionScore) < 0.0000001) { this.minDispersionScoreServerSet.add(primaryRS); } @@ -418,7 +418,7 @@ public class AssignmentVerificationReport { this.minDispersionScoreServerSet.clear(); this.minDispersionScoreServerSet.add(primaryRS); this.minDispersionScore = dispersionScore; - } else if (dispersionScore == this.minDispersionScore) { + } else if (Math.abs(dispersionScore - this.minDispersionScore) < 0.0000001) { this.minDispersionScoreServerSet.add(primaryRS); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java index e90aae6746..4d4358c579 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java @@ -121,10 +121,8 @@ public class ClusterStatusPublisher extends ScheduledChore { this.master = master; this.messagePeriod = conf.getInt(STATUS_PUBLISH_PERIOD, DEFAULT_STATUS_PUBLISH_PERIOD); try { - this.publisher = publisherClass.newInstance(); - } catch (InstantiationException e) { - throw new IOException("Can't create publisher " + publisherClass.getName(), e); - } catch (IllegalAccessException e) { + this.publisher = publisherClass.getDeclaredConstructor().newInstance(); + } catch (Exception e) { throw new IOException("Can't create publisher " + publisherClass.getName(), e); } this.publisher.connect(conf); @@ -175,7 +173,8 @@ public class ClusterStatusPublisher extends ScheduledChore { publisher.publish(cs); } - protected void cleanup() { + @Override + protected synchronized void cleanup() { connected = false; publisher.close(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java index 75ab00ca80..c1b5180cb6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/DeadServer.java @@ -155,6 +155,7 @@ public class DeadServer { } } + @Override public synchronized String toString() { StringBuilder sb = new StringBuilder(); for (ServerName sn : deadServers.keySet()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index 43ead3781c..a97f9f4264 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -2537,6 +2537,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { return info.getInfoPort(); } + @Override public String getRegionServerVersion(final ServerName sn) { RegionServerInfo info = this.regionServerTracker.getRegionServerInfo(sn); if (info != null && info.hasVersionInfo()) { @@ -2826,6 +2827,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { /** * @return the underlying snapshot manager */ + @Override public SnapshotManager getSnapshotManager() { return this.snapshotManager; } @@ -2833,6 +2835,7 @@ public class HMaster extends HRegionServer implements MasterServices, Server { /** * @return the underlying MasterProcedureManagerHost */ + @Override public MasterProcedureManagerHost getMasterProcedureManagerHost() { return mpmHost; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java index cc5ca0b052..11e458d850 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java @@ -69,11 +69,12 @@ public class HMasterCommandLine extends ServerCommandLine { this.masterClass = masterClass; } + @Override protected String getUsage() { return USAGE; } - + @Override public int run(String args[]) throws Exception { Options opt = new Options(); opt.addOption("localRegionServers", true, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java index 937b32ff82..e387f5950d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.master; import java.util.List; import java.util.Map; -import edu.umd.cs.findbugs.annotations.Nullable; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.conf.ConfigurationObserver; import org.apache.hadoop.conf.Configurable; @@ -151,5 +150,6 @@ public interface LoadBalancer extends Configurable, Stoppable, ConfigurationObse * Notification that config has changed * @param conf */ + @Override void onConfigurationChange(Configuration conf); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java index 8cc2c03aa8..11d9159142 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java @@ -189,7 +189,6 @@ import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.Repor import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest; import org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse; import org.apache.hadoop.hbase.regionserver.RSRpcServices; -import org.apache.hadoop.hbase.security.AccessDeniedException; import 
org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.access.AccessController; import org.apache.hadoop.hbase.security.visibility.VisibilityController; @@ -314,6 +313,7 @@ public class MasterRpcServices extends RSRpcServices /** * @return list of blocking services and their security info classes that this server supports */ + @Override protected List getServices() { List bssi = new ArrayList(4); bssi.add(new BlockingServiceAndInterface( diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java index 040342f9f9..c889395685 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java @@ -80,7 +80,6 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.RetryCounter; import org.apache.hadoop.hbase.util.RetryCounterFactory; -import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Triple; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; @@ -109,6 +108,8 @@ import org.apache.zookeeper.KeeperException; * and has completed the handling. */ @InterfaceAudience.Private +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Synchronization on concurrent map is intended") public class ServerManager { public static final String WAIT_ON_REGIONSERVERS_MAXTOSTART = "hbase.master.wait.on.regionservers.maxtostart"; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java index d5620b9974..2cdbbb7de1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java @@ -820,7 +820,7 @@ public class SplitLogManager { public enum TerminationStatus { IN_PROGRESS("in_progress"), SUCCESS("success"), FAILURE("failure"), DELETED("deleted"); - String statusMsg; + final String statusMsg; TerminationStatus(String msg) { statusMsg = msg; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java index 86e9093a57..16ae41a7a5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java @@ -369,11 +369,13 @@ public abstract class TableLockManager { serverName, writeLockTimeoutMs, false, purpose); } + @Override public TableLock readLock(TableName tableName, String purpose) { return new TableLockImpl(tableName, zkWatcher, serverName, readLockTimeoutMs, true, purpose); } + @Override public void visitAllLocks(MetadataHandler handler) throws IOException { for (String tableName : getTableNames()) { String tableLockZNode = ZKUtil.joinZNode(zkWatcher.tableLockZNode, tableName); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java index d5edfab5d2..06ab3b5392 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java @@ -75,7 +75,7 @@ class RegionLocationFinder { private CacheLoader loader = new CacheLoader() { - + @Override public ListenableFuture reload(final HRegionInfo hri, HDFSBlocksDistribution oldValue) throws Exception { return executor.submit(new Callable() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java index 3047d28192..374070ceb6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/StochasticLoadBalancer.java @@ -348,8 +348,8 @@ public class StochasticLoadBalancer extends BaseLoadBalancer { // Allow turning this feature off if the locality cost is not going to // be used in any computations. RegionLocationFinder finder = null; - if (this.localityCost != null && this.localityCost.getMultiplier() > 0 - || this.rackLocalityCost != null && this.rackLocalityCost.getMultiplier() > 0) { + if ((this.localityCost != null && this.localityCost.getMultiplier() > 0) + || (this.rackLocalityCost != null && this.rackLocalityCost.getMultiplier() > 0)) { finder = this.regionFinder; } @@ -1399,7 +1399,7 @@ public class StochasticLoadBalancer extends BaseLoadBalancer { // Now if we found a region load get the type of cost that was requested. if (regionLoadList != null) { - cost += getRegionLoadCost(regionLoadList); + cost = (long) (cost + getRegionLoadCost(regionLoadList)); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java index d431b2ebbc..743d9a43e8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java @@ -109,7 +109,7 @@ public abstract class CleanerChore extends Schedu Class c = Class.forName(className).asSubclass( FileCleanerDelegate.class); @SuppressWarnings("unchecked") - T cleaner = (T) c.newInstance(); + T cleaner = (T) c.getDeclaredConstructor().newInstance(); cleaner.setConf(conf); return cleaner; } catch (Exception e) { @@ -282,7 +282,7 @@ public abstract class CleanerChore extends Schedu } @Override - public void cleanup() { + public synchronized void cleanup() { for (T lc : this.cleanersChain) { try { lc.stop("Exiting"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java index cd728fc76f..43a0f65be9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java @@ -75,6 +75,7 @@ public abstract class TableEventHandler extends EventHandler { this.tableName = tableName; } + @Override public TableEventHandler prepare() throws IOException { //acquire the table write lock, blocking this.tableLock = masterServices.getTableLockManager() diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java index 5f37720397..6367dec0d7 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java @@ -573,6 +573,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { super(serverName); } + @Override public boolean requireExclusiveLock(Procedure proc) { ServerProcedureInterface spi = (ServerProcedureInterface)proc; switch (spi.getServerOperationType()) { @@ -617,6 +618,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { } } + @Override public boolean requireExclusiveLock(Procedure proc) { TableProcedureInterface tpi = (TableProcedureInterface)proc; switch (tpi.getTableOperationType()) { @@ -1045,6 +1047,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { /** * True if the queue is not in the run-queue and it is owned by an event. */ + @Override public boolean isSuspended() { return suspended; } @@ -1093,6 +1096,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { // This should go away when we have the new AM and its events // and we move xlock to the lock-event-queue. + @Override public synchronized boolean isAvailable() { return !hasExclusiveLock() && !isEmpty(); } @@ -1128,6 +1132,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { super(key, priority); } + @Override public void add(final Procedure proc, final boolean addToFront) { if (addToFront) { addFront(proc); @@ -1144,6 +1149,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { runnables.addLast(proc); } + @Override public Procedure peek() { return runnables.peek(); } @@ -1158,6 +1164,7 @@ public class MasterProcedureScheduler implements ProcedureRunnableSet { return runnables.isEmpty(); } + @Override public int size() { return runnables.size(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java index 1185073bab..0ed75a322d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java @@ -171,7 +171,6 @@ public class RestoreSnapshotHandler extends TableEventHandler implements Snapsho // 5. restore acl of snapshot into the table. 
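Editorial aside: the AssignmentVerificationReport hunks a few files back replace dispersionScore == this.minDispersionScore with Math.abs(dispersionScore - this.minDispersionScore) < 0.0000001. Exact == comparison of doubles that came out of separate computations is unreliable; an absolute tolerance is the usual workaround. Stand-alone illustration with values that are mathematically equal but not bitwise equal:

class FloatEqualityDemo {
  private static final double EPSILON = 0.0000001;

  static boolean nearlyEqual(double x, double y) {
    // Absolute-tolerance comparison, as used in the dispersion-score hunks.
    return Math.abs(x - y) < EPSILON;
  }

  public static void main(String[] args) {
    double a = 0.1 + 0.2;   // 0.30000000000000004
    double b = 0.3;

    System.out.println(a == b);            // false: representations differ
    System.out.println(nearlyEqual(a, b)); // true: within tolerance
  }
}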
if (restoreAcl && snapshot.hasUsersAndPermissions() - && snapshot.getUsersAndPermissions() != null && SnapshotDescriptionUtils.isSecurityAvailable(server.getConfiguration())) { RestoreSnapshotHelper.restoreSnapshotACL(snapshot, tableName, server.getConfiguration()); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java index 39387cbebe..4060e0c13a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java @@ -132,6 +132,7 @@ public abstract class TakeSnapshotHandler extends EventHandler implements Snapsh return htd; } + @Override public TakeSnapshotHandler prepare() throws Exception { super.prepare(); this.tableLock.acquire(); // after this, you should ensure to release this lock in diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java index 0279a6038d..c00a125fa1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java @@ -103,7 +103,7 @@ public class Procedure implements Callable, ForeignExceptionListener { // /** lock to prevent nodes from acquiring and then releasing before we can track them */ - private Object joinBarrierLock = new Object(); + private final Object joinBarrierLock = new Object(); private final List acquiringMembers; private final List inBarrierMembers; private final HashMap dataFromFinishedMembers; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java index 3ab4ac5558..2c9b5ddcac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java @@ -88,11 +88,9 @@ public abstract class ProcedureManagerHost { E impl; Object o = null; try { - o = implClass.newInstance(); + o = implClass.getDeclaredConstructor().newInstance(); impl = (E)o; - } catch (InstantiationException e) { - throw new IOException(e); - } catch (IllegalAccessException e) { + } catch (Exception e) { throw new IOException(e); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java index 892733828e..6223c0ff70 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java @@ -153,6 +153,7 @@ abstract public class Subprocedure implements Callable { * Subprocedure, ForeignException)}. 
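Editorial aside: Procedure's joinBarrierLock above (like FSDataInputStreamWrapper's stream-creation lock earlier) is made final because a non-final monitor field could in principle be reassigned, leaving two threads synchronizing on different objects. A minimal sketch of the private-final-lock pattern, with illustrative names:

class CounterWithPrivateLock {
  // Dedicated, final monitor: it can never be swapped out from under a
  // thread that is already synchronized on it.
  private final Object lock = new Object();
  private long count;

  void increment() {
    synchronized (lock) {
      count++;
    }
  }

  long get() {
    synchronized (lock) {
      return count;
    }
  }
}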
*/ @SuppressWarnings("finally") + @Override final public Void call() { LOG.debug("Starting subprocedure '" + barrierName + "' with timeout " + executionTimeoutTimer.getMaxTime() + "ms"); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java index 9bfa8dbf52..f1b7ff9e36 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/MasterQuotaManager.java @@ -344,12 +344,14 @@ public class MasterQuotaManager implements RegionStateListener { return -1; } + @Override public void onRegionMerged(HRegionInfo hri) throws IOException { if (initialized) { namespaceQuotaManager.updateQuotaForRegionMerge(hri); } } + @Override public void onRegionSplit(HRegionInfo hri) throws IOException { if (initialized) { namespaceQuotaManager.checkQuotaToSplitRegion(hri); @@ -498,7 +500,7 @@ public class MasterQuotaManager implements RegionStateListener { } private static class NamedLock { - private HashSet locks = new HashSet(); + private final HashSet locks = new HashSet(); public void lock(final T name) throws InterruptedException { synchronized (locks) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java index 39f145634c..2ceaa47d85 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaCache.java @@ -199,11 +199,11 @@ public class QuotaCache implements Stoppable { // Prefetch online tables/namespaces for (TableName table : QuotaCache.this.rsServices.getOnlineTables()) { if (table.isSystemTable()) continue; - if (!QuotaCache.this.tableQuotaCache.contains(table)) { + if (!QuotaCache.this.tableQuotaCache.containsKey(table)) { QuotaCache.this.tableQuotaCache.putIfAbsent(table, new QuotaState()); } String ns = table.getNamespaceAsString(); - if (!QuotaCache.this.namespaceQuotaCache.contains(ns)) { + if (!QuotaCache.this.namespaceQuotaCache.containsKey(ns)) { QuotaCache.this.namespaceQuotaCache.putIfAbsent(ns, new QuotaState()); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java index e6fe9cdbb7..d35d620a9c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java @@ -314,6 +314,7 @@ public class CompactSplitThread implements CompactionRequestor, PropagatingConfi return ret; } + @Override public CompactionRequest requestCompaction(final Region r, final Store s, final String why, int priority, CompactionRequest request, User user) throws IOException { return requestCompactionInternal(r, s, why, priority, request, true, user); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 7a5720ae21..c68f813041 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -37,6 +37,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InterruptedIOException; 
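Editorial aside: the QuotaCache hunk above is a genuine bug fix rather than a style change. ConcurrentHashMap.contains(Object) is a legacy Hashtable-era method that tests whether the argument occurs as a value in the map, so !tableQuotaCache.contains(table) was effectively always true and the guard did nothing useful; containsKey is what the code meant. Stand-alone demonstration:

import java.util.concurrent.ConcurrentHashMap;

class ContainsVsContainsKeyDemo {
  public static void main(String[] args) {
    ConcurrentHashMap<String, String> cache = new ConcurrentHashMap<>();
    cache.put("ns:table1", "quota-state");

    // Legacy method: searches the VALUES, not the keys.
    System.out.println(cache.contains("ns:table1"));    // false
    System.out.println(cache.contains("quota-state"));  // true

    // What the QuotaCache code intended:
    System.out.println(cache.containsKey("ns:table1")); // true
  }
}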
import java.lang.reflect.Constructor; +import java.nio.charset.StandardCharsets; import java.text.ParseException; import java.util.AbstractList; import java.util.ArrayList; @@ -76,7 +77,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.commons.lang.RandomStringUtils; @@ -206,6 +206,8 @@ import org.apache.htrace.Trace; import org.apache.htrace.TraceScope; @InterfaceAudience.Private +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Synchronization on concurrent map is intended") public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region { private static final Log LOG = LogFactory.getLog(HRegion.class); @@ -1012,7 +1014,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi this.stores.put(store.getFamily().getName(), store); long storeMaxSequenceId = store.getMaxSequenceId(); - maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), + maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(StandardCharsets.UTF_8), storeMaxSequenceId); if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) { maxSeqId = storeMaxSequenceId; @@ -5393,7 +5395,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi Store store = this.stores.get(column); if (store == null) { throw new IllegalArgumentException("No column family : " + - new String(column) + " available"); + new String(column, StandardCharsets.UTF_8) + " available"); } Collection storeFiles = store.getStorefiles(); if (storeFiles == null) continue; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 32bab6b303..4853b2b050 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -26,6 +26,7 @@ import java.lang.reflect.Constructor; import java.net.BindException; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Collection; @@ -209,6 +210,8 @@ import sun.misc.SignalHandler; */ @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS) @SuppressWarnings("deprecation") +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Use of an atomic type both as monitor and condition variable is intended") public class HRegionServer extends HasThread implements RegionServerServices, LastSequenceId, ConfigurationObserver { @@ -1666,7 +1669,7 @@ public class HRegionServer extends HasThread implements if (((HRegion)r).shouldFlush(whyFlush)) { FlushRequester requester = server.getFlushRequester(); if (requester != null) { - long randomDelay = RandomUtils.nextInt(RANGE_OF_DELAY) + MIN_DELAY_TIME; + long randomDelay = (long) RandomUtils.nextInt(RANGE_OF_DELAY) + MIN_DELAY_TIME; LOG.info(getName() + " requesting flush of " + r.getRegionInfo().getRegionNameAsString() + " because " + whyFlush.toString() + @@ -2953,13 +2956,15 @@ public class HRegionServer extends HasThread implements } } 
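Several hunks in HRegion here, and in the HRegionServer and ExpressionParser diffs further down, pass StandardCharsets.UTF_8 to getBytes and to the String(byte[], ...) constructor, so the byte form of family, region, and label names no longer depends on the JVM's default charset. A minimal round-trip sketch with an illustrative value:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public final class ExplicitCharset {
      public static void main(String[] args) {
        String family = "cf1";

        // Explicit charset: identical bytes on every platform. The no-argument overloads
        // silently use file.encoding, which is not guaranteed to be UTF-8.
        byte[] encoded = family.getBytes(StandardCharsets.UTF_8);
        String decoded = new String(encoded, StandardCharsets.UTF_8);

        System.out.println(Arrays.toString(encoded)); // [99, 102, 49]
        System.out.println(decoded.equals(family));   // true
      }
    }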
- final Boolean previous = this.regionsInTransitionInRS.putIfAbsent(encodedName.getBytes(), + final Boolean previous = this.regionsInTransitionInRS + .putIfAbsent(encodedName.getBytes(StandardCharsets.UTF_8), Boolean.FALSE); if (Boolean.TRUE.equals(previous)) { LOG.info("Received CLOSE for the region:" + encodedName + " , which we are already " + "trying to OPEN. Cancelling OPENING."); - if (!regionsInTransitionInRS.replace(encodedName.getBytes(), previous, Boolean.FALSE)){ + if (!regionsInTransitionInRS.replace(encodedName.getBytes(StandardCharsets.UTF_8), + previous, Boolean.FALSE)){ // The replace failed. That should be an exceptional case, but theoretically it can happen. // We're going to try to do a standard close then. LOG.warn("The opening for region " + encodedName + " was done before we could cancel it." + @@ -2989,7 +2994,7 @@ public class HRegionServer extends HasThread implements if (actualRegion == null) { LOG.error("Received CLOSE for a region which is not online, and we're not opening."); - this.regionsInTransitionInRS.remove(encodedName.getBytes()); + this.regionsInTransitionInRS.remove(encodedName.getBytes(StandardCharsets.UTF_8)); // The master deletes the znode when it receives this exception. throw new NotServingRegionException("The region " + encodedName + " is not online, and is not opening."); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 1739553bd8..1e69dbf20d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -41,6 +41,7 @@ import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; @@ -139,8 +140,8 @@ public class HStore implements Store { volatile boolean forceMajor = false; /* how many bytes to write between status checks */ static int closeCheckInterval = 0; - private volatile long storeSize = 0L; - private volatile long totalUncompressedBytes = 0L; + private AtomicLong storeSize = new AtomicLong(); + private AtomicLong totalUncompressedBytes = new AtomicLong(); /** * RWLock for store operations. 
@@ -200,13 +201,13 @@ public class HStore implements Store { private Encryption.Context cryptoContext = Encryption.Context.NONE; - private volatile long flushedCellsCount = 0; - private volatile long compactedCellsCount = 0; - private volatile long majorCompactedCellsCount = 0; - private volatile long flushedCellsSize = 0; - private volatile long flushedOutputFileSize = 0; - private volatile long compactedCellsSize = 0; - private volatile long majorCompactedCellsSize = 0; + private AtomicLong flushedCellsCount = new AtomicLong(); + private AtomicLong compactedCellsCount = new AtomicLong(); + private AtomicLong majorCompactedCellsCount = new AtomicLong(); + private AtomicLong flushedCellsSize = new AtomicLong(); + private AtomicLong flushedOutputFileSize = new AtomicLong(); + private AtomicLong compactedCellsSize = new AtomicLong(); + private AtomicLong majorCompactedCellsSize = new AtomicLong(); /** * Constructor @@ -549,8 +550,8 @@ public class HStore implements Store { StoreFile storeFile = future.get(); if (storeFile != null) { long length = storeFile.getReader().length(); - this.storeSize += length; - this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes(); + this.storeSize.addAndGet(length); + this.totalUncompressedBytes.addAndGet(storeFile.getReader().getTotalUncompressedBytes()); if (LOG.isDebugEnabled()) { LOG.debug("loaded " + storeFile.toStringDetailed()); } @@ -844,8 +845,8 @@ public class HStore implements Store { private void bulkLoadHFile(StoreFile sf) throws IOException { StoreFile.Reader r = sf.getReader(); - this.storeSize += r.length(); - this.totalUncompressedBytes += r.getTotalUncompressedBytes(); + this.storeSize.addAndGet(r.length()); + this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes()); // Append the new storefile into the list this.lock.writeLock().lock(); @@ -1014,8 +1015,8 @@ public class HStore implements Store { StoreFile sf = createStoreFileAndReader(dstPath); StoreFile.Reader r = sf.getReader(); - this.storeSize += r.length(); - this.totalUncompressedBytes += r.getTotalUncompressedBytes(); + this.storeSize.addAndGet(r.length()); + this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes()); if (LOG.isInfoEnabled()) { LOG.info("Added " + sf + ", entries=" + r.getEntries() + @@ -1348,11 +1349,11 @@ public class HStore implements Store { writeCompactionWalRecord(filesToCompact, sfs); replaceStoreFiles(filesToCompact, sfs); if (cr.isMajor()) { - majorCompactedCellsCount += getCompactionProgress().totalCompactingKVs; - majorCompactedCellsSize += getCompactionProgress().totalCompactedSize; + majorCompactedCellsCount.addAndGet(getCompactionProgress().totalCompactingKVs); + majorCompactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize); } else { - compactedCellsCount += getCompactionProgress().totalCompactingKVs; - compactedCellsSize += getCompactionProgress().totalCompactedSize; + compactedCellsCount.addAndGet(getCompactionProgress().totalCompactingKVs); + compactedCellsSize.addAndGet(getCompactionProgress().totalCompactedSize); } for (StoreFile sf : sfs) { @@ -1475,7 +1476,7 @@ public class HStore implements Store { } } message.append("total size for store is ") - .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize, "", 1)) + .append(StringUtils.TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1)) .append(". 
This selection was in queue for ") .append(StringUtils.formatTimeDiff(compactionStartTime, cr.getSelectionTime())) .append(", and took ").append(StringUtils.formatTimeDiff(now, compactionStartTime)) @@ -1812,7 +1813,7 @@ public class HStore implements Store { completeCompaction(delSfs); LOG.info("Completed removal of " + delSfs.size() + " unnecessary (expired) file(s) in " + this + " of " + this.getRegionInfo().getRegionNameAsString() - + "; total size for store is " + TraditionalBinaryPrefix.long2String(storeSize, "", 1)); + + "; total size for store is " + TraditionalBinaryPrefix.long2String(storeSize.get(), "", 1)); } @Override @@ -1892,16 +1893,16 @@ public class HStore implements Store { protected void completeCompaction(final Collection compactedFiles, boolean removeFiles) throws IOException { LOG.debug("Completing compaction..."); - this.storeSize = 0L; - this.totalUncompressedBytes = 0L; + this.storeSize.set(0L); + this.totalUncompressedBytes.set(0L); for (StoreFile hsf : this.storeEngine.getStoreFileManager().getStorefiles()) { StoreFile.Reader r = hsf.getReader(); if (r == null) { LOG.warn("StoreFile " + hsf + " has a null Reader"); continue; } - this.storeSize += r.length(); - this.totalUncompressedBytes += r.getTotalUncompressedBytes(); + this.storeSize.addAndGet(r.length()); + this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes()); } } @@ -2147,7 +2148,7 @@ public class HStore implements Store { @Override public long getSize() { - return storeSize; + return storeSize.get(); } @Override @@ -2275,7 +2276,7 @@ public class HStore implements Store { @Override public long getStoreSizeUncompressed() { - return this.totalUncompressedBytes; + return this.totalUncompressedBytes.get(); } @Override @@ -2491,9 +2492,9 @@ public class HStore implements Store { committedFiles.add(sf.getPath()); } - HStore.this.flushedCellsCount += cacheFlushCount; - HStore.this.flushedCellsSize += cacheFlushSize; - HStore.this.flushedOutputFileSize += outputFileSize; + HStore.this.flushedCellsCount.addAndGet(cacheFlushCount); + HStore.this.flushedCellsSize.addAndGet(cacheFlushSize); + HStore.this.flushedOutputFileSize.addAndGet(outputFileSize); // Add new file to store files. Clear snapshot too while we have the Store write lock. 
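The HStore conversion running through these hunks turns volatile long counters updated with += into AtomicLong fields: a volatile read-modify-write is two separate operations, so concurrent flushes and compactions could drop increments. A stripped-down sketch of the pattern; the field names echo the patch, but the class around them is invented:

    import java.util.concurrent.atomic.AtomicLong;

    public class StoreMetrics {
      // addAndGet performs the read-modify-write atomically; "volatile long x; x += n" does not.
      private final AtomicLong storeSize = new AtomicLong();
      private final AtomicLong flushedCellsCount = new AtomicLong();

      public void onStoreFileAdded(long fileLength) {
        storeSize.addAndGet(fileLength);
      }

      public void onFlushCompleted(long cellCount) {
        flushedCellsCount.addAndGet(cellCount);
      }

      public void resetAfterCompaction() {
        storeSize.set(0L);
      }

      public long getSize() {
        return storeSize.get();
      }

      public long getFlushedCellsCount() {
        return flushedCellsCount.get();
      }
    }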
return HStore.this.updateStorefiles(storeFiles, snapshot.getId()); @@ -2526,8 +2527,9 @@ public class HStore implements Store { StoreFileInfo storeFileInfo = fs.getStoreFileInfo(getColumnFamilyName(), file); StoreFile storeFile = createStoreFileAndReader(storeFileInfo); storeFiles.add(storeFile); - HStore.this.storeSize += storeFile.getReader().length(); - HStore.this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes(); + HStore.this.storeSize.addAndGet(storeFile.getReader().length()); + HStore.this.totalUncompressedBytes.addAndGet( + storeFile.getReader().getTotalUncompressedBytes()); if (LOG.isInfoEnabled()) { LOG.info("Region: " + HStore.this.getRegionInfo().getEncodedName() + " added " + storeFile + ", entries=" + storeFile.getReader().getEntries() + @@ -2567,7 +2569,7 @@ public class HStore implements Store { } public static final long FIXED_OVERHEAD = - ClassSize.align(ClassSize.OBJECT + (17 * ClassSize.REFERENCE) + (11 * Bytes.SIZEOF_LONG) + ClassSize.align(ClassSize.OBJECT + (26 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG) + (5 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN)); public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD @@ -2606,37 +2608,37 @@ public class HStore implements Store { @Override public long getFlushedCellsCount() { - return flushedCellsCount; + return flushedCellsCount.get(); } @Override public long getFlushedCellsSize() { - return flushedCellsSize; + return flushedCellsSize.get(); } @Override public long getFlushedOutputFileSize() { - return flushedOutputFileSize; + return flushedOutputFileSize.get(); } @Override public long getCompactedCellsCount() { - return compactedCellsCount; + return compactedCellsCount.get(); } @Override public long getCompactedCellsSize() { - return compactedCellsSize; + return compactedCellsSize.get(); } @Override public long getMajorCompactedCellsCount() { - return majorCompactedCellsCount; + return majorCompactedCellsCount.get(); } @Override public long getMajorCompactedCellsSize() { - return majorCompactedCellsSize; + return majorCompactedCellsSize.get(); } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java index ff6811002f..61bf0c9b77 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java @@ -91,7 +91,7 @@ public class IncreasingToUpperBoundRegionSplitPolicy extends ConstantSizeRegionS } } - return foundABigStore | force; + return foundABigStore || force; } /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java index a255b8521f..2a9418247b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java @@ -65,5 +65,6 @@ public interface InternalScanner extends Closeable { * Closes the scanner and releases any resources it has allocated * @throws IOException */ + @Override void close() throws IOException; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java 
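The IncreasingToUpperBoundRegionSplitPolicy hunk above swaps a bitwise | between booleans for the logical ||. With two plain boolean locals the resulting value is identical; the difference is that || short-circuits and states the intent, which is what the static analysis flags. A tiny demonstration with a made-up helper:

    public final class ShortCircuitDemo {
      private static int calls = 0;

      private static boolean expensiveCheck() {
        calls++;
        return true;
      }

      public static void main(String[] args) {
        boolean foundABigStore = true;

        // '|' always evaluates both operands; '||' skips the right side when the left is true.
        boolean eager = foundABigStore | expensiveCheck();
        boolean lazy  = foundABigStore || expensiveCheck();

        System.out.println(eager + " " + lazy); // true true
        System.out.println(calls);              // 1: only the '|' form ran the helper
      }
    }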
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java index 30dc2c1883..c0ba844d45 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java @@ -97,6 +97,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner } } + @Override public Cell peek() { if (this.current == null) { return null; @@ -104,6 +105,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner return this.current.peek(); } + @Override public Cell next() throws IOException { if(this.current == null) { return null; @@ -180,6 +182,8 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner public KVScannerComparator(KVComparator kvComparator) { this.kvComparator = kvComparator; } + + @Override public int compare(KeyValueScanner left, KeyValueScanner right) { int comparison = compare(left.peek(), right.peek()); if (comparison != 0) { @@ -208,6 +212,7 @@ public class KeyValueHeap extends NonReversedNonLazyKeyValueScanner } } + @Override public void close() { if (this.current != null) { this.current.close(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java index eb8bd06504..02ebd972cf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java @@ -288,11 +288,13 @@ public class Leases extends HasThread { return this.leaseName.hashCode(); } + @Override public long getDelay(TimeUnit unit) { return unit.convert(this.expirationTime - EnvironmentEdgeManager.currentTime(), TimeUnit.MILLISECONDS); } + @Override public int compareTo(Delayed o) { long delta = this.getDelay(TimeUnit.MILLISECONDS) - o.getDelay(TimeUnit.MILLISECONDS); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java index 0e5f2847a0..246c02cf58 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java @@ -37,10 +37,6 @@ import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.HasThread; -import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.ReentrantLock; - import com.google.common.annotations.VisibleForTesting; /** @@ -54,6 +50,8 @@ import com.google.common.annotations.VisibleForTesting; */ @InterfaceAudience.Private @VisibleForTesting +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Use of an atomic type both as monitor and condition variable is intended") public class LogRoller extends HasThread { private static final Log LOG = LogFactory.getLog(LogRoller.class); private final ReentrantLock rollLock = new ReentrantLock(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java index 3bca175684..2da782e0a7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java @@ -29,6 +29,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.concurrent.BlockingQueue; @@ -700,8 +701,13 @@ class MemStoreFlusher implements FlushRequester { } @Override + public int hashCode() { + return System.identityHashCode(this); + } + + @Override public boolean equals(Object obj) { - return (this == obj); + return Objects.equals(this, obj); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java index 57d6356d1d..cc68b03fcd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConcurrencyControl.java @@ -76,7 +76,7 @@ public class MultiVersionConcurrencyControl { while (true) { long seqId = this.getWritePoint(); if (seqId >= newStartPoint) break; - if (this.tryAdvanceTo(/* newSeqId = */ newStartPoint, /* expected = */ seqId)) break; + if (this.tryAdvanceTo(newStartPoint, seqId)) break; } } @@ -245,6 +245,7 @@ public class MultiVersionConcurrencyControl { } @VisibleForTesting + @Override public String toString() { return Objects.toStringHelper(this) .add("readPoint", readPoint) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java index 266b4f3c74..597c665863 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java @@ -1057,10 +1057,9 @@ public class RSRpcServices implements HBaseRPCErrorHandler, Class rpcSchedulerFactoryClass = rs.conf.getClass( REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS, SimpleRpcSchedulerFactory.class); - rpcSchedulerFactory = ((RpcSchedulerFactory) rpcSchedulerFactoryClass.newInstance()); - } catch (InstantiationException e) { - throw new IllegalArgumentException(e); - } catch (IllegalAccessException e) { + rpcSchedulerFactory = (RpcSchedulerFactory) + rpcSchedulerFactoryClass.getDeclaredConstructor().newInstance(); + } catch (Exception e) { throw new IllegalArgumentException(e); } // Server to handle client requests. 
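The MemStoreFlusher entry above gains a hashCode to pair with its equals, keeping the equals/hashCode contract for objects sitting in the flush queue. One caution about the new body: Objects.equals(a, b) is defined as (a == b) || (a != null && a.equals(b)), so calling Objects.equals(this, obj) from inside equals() re-enters equals() for any distinct non-null argument; an identity-based pair is normally written out directly. A self-contained sketch, not the HBase class (the CompactionRequest hunk further down uses the same pattern):

    public final class FlushQueueEntry {
      private final String regionName;

      public FlushQueueEntry(String regionName) {
        this.regionName = regionName;
      }

      @Override
      public int hashCode() {
        // Identity hash to match the identity-based equals below.
        return System.identityHashCode(this);
      }

      @Override
      public boolean equals(Object obj) {
        // Plain reference comparison; delegating to Objects.equals(this, obj) here would
        // call back into this method for every distinct non-null argument.
        return this == obj;
      }
    }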
@@ -1651,6 +1650,9 @@ public class RSRpcServices implements HBaseRPCErrorHandler, */ @Override @QosPriority(priority=HConstants.ADMIN_QOS) + @edu.umd.cs.findbugs.annotations.SuppressWarnings( + value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="We double up use of an atomic both as monitor and condition variable") public OpenRegionResponse openRegion(final RpcController controller, final OpenRegionRequest request) throws ServiceException { requestCount.increment(); @@ -2581,7 +2583,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, private static final long serialVersionUID = -4305297078988180130L; @Override - public Throwable fillInStackTrace() { + public synchronized Throwable fillInStackTrace() { return this; } }; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java index dbe8521ef1..f29397d6b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java @@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.io.hfile.CacheConfig; -import org.apache.hadoop.hbase.ipc.RpcServer; import org.apache.hadoop.hbase.metrics.MetricRegistry; import org.apache.hadoop.hbase.regionserver.DeleteTracker; import org.apache.hadoop.hbase.regionserver.Region.Operation; @@ -88,11 +87,6 @@ import com.google.common.collect.Lists; import com.google.protobuf.Message; import com.google.protobuf.Service; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.protobuf.Message; -import com.google.protobuf.Service; - /** * Implements the coprocessor environment and runtime support for coprocessors * loaded within a {@link Region}. 
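The synchronized modifier added to fillInStackTrace in the anonymous exception inside RSRpcServices matches the declaration it overrides in Throwable; the override returns this so a pre-allocated, shared exception carries no stack trace and is cheap to throw. A hedged, self-contained sketch of that pattern outside HBase:

    public final class PreallocatedException {
      // One shared instance; because fillInStackTrace is overridden to do nothing,
      // no stack frames are captured at construction or on later throws.
      static final Exception CACHE_MISS = new Exception("cache miss") {
        private static final long serialVersionUID = 1L;

        @Override
        public synchronized Throwable fillInStackTrace() {
          // Keep 'synchronized' to mirror Throwable's own signature, and return this
          // instead of walking the stack.
          return this;
        }
      };

      public static void main(String[] args) {
        try {
          throw CACHE_MISS;
        } catch (Exception e) {
          System.out.println(e.getMessage());                // cache miss
          System.out.println(e.getStackTrace().length == 0); // true
        }
      }
    }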
@@ -160,6 +154,7 @@ public class RegionCoprocessorHost return rsServices; } + @Override public void shutdown() { super.shutdown(); MetricsCoprocessor.removeRegistry(this.metricRegistry); @@ -525,6 +520,7 @@ public class RegionCoprocessorHost throws IOException { oserver.postClose(ctx, abortRequested); } + @Override public void postEnvCall(RegionEnvironment env) { shutdown(env); } @@ -1704,10 +1700,12 @@ public class RegionCoprocessorHost public abstract void call(RegionObserver observer, ObserverContext ctx) throws IOException; + @Override public boolean hasCall(Coprocessor observer) { return observer instanceof RegionObserver; } + @Override public void call(Coprocessor observer, ObserverContext ctx) throws IOException { call((RegionObserver)observer, ctx); @@ -1724,10 +1722,12 @@ public class RegionCoprocessorHost public abstract void call(EndpointObserver observer, ObserverContext ctx) throws IOException; + @Override public boolean hasCall(Coprocessor observer) { return observer instanceof EndpointObserver; } + @Override public void call(Coprocessor observer, ObserverContext ctx) throws IOException { call((EndpointObserver)observer, ctx); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java index 6831c91977..77d0e35cbb 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ScannerContext.java @@ -425,8 +425,8 @@ public class ScannerContext { TIME_LIMIT_REACHED_MID_ROW(true, true), BATCH_LIMIT_REACHED(true, true); - private boolean moreValues; - private boolean limitReached; + private final boolean moreValues; + private final boolean limitReached; private NextState(boolean moreValues, boolean limitReached) { this.moreValues = moreValues; @@ -482,7 +482,7 @@ public class ScannerContext { * limits, the checker must know their own scope (i.e. are they checking the limits between * rows, between cells, etc...) */ - int depth; + final int depth; LimitScope(int depth) { this.depth = depth; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java index 874acb2d01..1115528f62 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java @@ -83,9 +83,9 @@ public class SplitLogWorker implements Runnable { } public SplitLogWorker(final Server hserver, final Configuration conf, - final RegionServerServices server, final LastSequenceId sequenceIdChecker, + final RegionServerServices rsServices, final LastSequenceId sequenceIdChecker, final WALFactory factory) { - this(server, conf, server, new TaskExecutor() { + this(hserver, conf, rsServices, new TaskExecutor() { @Override public Status exec(String filename, RecoveryMode mode, CancelableProgressable p) { Path walDir; @@ -102,7 +102,7 @@ public class SplitLogWorker implements Runnable { // encountered a bad non-retry-able persistent error. 
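A large share of the hunks in this patch, including the RegionCoprocessorHost callbacks above, simply add @Override to methods that implement an interface or replace a superclass method; the annotation turns an accidental overload (wrong parameter type, typo in the name) into a compile error instead of a silently unused method. The classic case, as a tiny sketch:

    public final class OverrideDemo {
      static final class Key {
        final int id;
        Key(int id) { this.id = id; }

        // Without @Override, 'public boolean equals(Key other)' would compile as an
        // unrelated overload and collections would keep using Object.equals.
        @Override
        public boolean equals(Object other) {
          return other instanceof Key && ((Key) other).id == this.id;
        }

        @Override
        public int hashCode() {
          return id;
        }
      }

      public static void main(String[] args) {
        System.out.println(new Key(7).equals(new Key(7))); // true
      }
    }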
try { if (!WALSplitter.splitLogFile(walDir, fs.getFileStatus(new Path(walDir, filename)), - fs, conf, p, sequenceIdChecker, server.getCoordinatedStateManager(), mode, factory)) { + fs, conf, p, sequenceIdChecker, rsServices.getCoordinatedStateManager(), mode, factory)) { return Status.PREEMPTED; } } catch (InterruptedIOException iioe) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java index 3576478d3b..a3eea6dd2b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransactionImpl.java @@ -170,6 +170,7 @@ public class SplitTransactionImpl implements SplitTransaction { * @return true if the region is splittable else * false if it is not (e.g. its already closed, etc.). */ + @Override public boolean prepare() throws IOException { if (!this.parent.isSplittable()) return false; // Split key can be null if this region is unsplittable; i.e. has refs. @@ -886,6 +887,7 @@ public class SplitTransactionImpl implements SplitTransaction { this.family = family; } + @Override public Pair call() throws IOException { return splitStoreFile(family, sf); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java index 28c08926c7..c27cf40404 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java @@ -1045,6 +1045,7 @@ public class StoreFile { } } + @Override public void append(final Cell cell) throws IOException { appendGeneralBloomfilter(cell); appendDeleteFamilyBloomFilter(cell); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java index 08259de092..12da6b71cf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java @@ -502,37 +502,49 @@ public class StoreFileInfo { } @Override - public boolean equals(Object that) { - if (this == that) return true; - if (that == null) return false; - - if (!(that instanceof StoreFileInfo)) return false; - - StoreFileInfo o = (StoreFileInfo)that; - if (initialPath != null && o.initialPath == null) return false; - if (initialPath == null && o.initialPath != null) return false; - if (initialPath != o.initialPath && initialPath != null - && !initialPath.equals(o.initialPath)) return false; - - if (reference != null && o.reference == null) return false; - if (reference == null && o.reference != null) return false; - if (reference != o.reference && reference != null - && !reference.equals(o.reference)) return false; - - if (link != null && o.link == null) return false; - if (link == null && o.link != null) return false; - if (link != o.link && link != null && !link.equals(o.link)) return false; - - return true; - }; - + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((initialPath == null) ? 0 : initialPath.hashCode()); + result = prime * result + ((link == null) ? 0 : link.hashCode()); + result = prime * result + ((reference == null) ? 
0 : reference.hashCode()); + return result; + } @Override - public int hashCode() { - int hash = 17; - hash = hash * 31 + ((reference == null) ? 0 : reference.hashCode()); - hash = hash * 31 + ((initialPath == null) ? 0 : initialPath.hashCode()); - hash = hash * 31 + ((link == null) ? 0 : link.hashCode()); - return hash; + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + StoreFileInfo other = (StoreFileInfo) obj; + if (initialPath == null) { + if (other.initialPath != null) { + return false; + } + } else if (!initialPath.equals(other.initialPath)) { + return false; + } + if (link == null) { + if (other.link != null) { + return false; + } + } else if (!link.equals(other.link)) { + return false; + } + if (reference == null) { + if (other.reference != null) { + return false; + } + } else if (!reference.equals(other.reference)) { + return false; + } + return true; } + } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java index f5eb74f3c6..8132365625 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java @@ -150,14 +150,17 @@ public class StoreFileScanner implements KeyValueScanner { matcher, readPt, true); } + @Override public String toString() { return "StoreFileScanner[" + hfs.toString() + ", cur=" + cur + "]"; } + @Override public Cell peek() { return cur; } + @Override public Cell next() throws IOException { Cell retKey = cur; @@ -178,6 +181,7 @@ public class StoreFileScanner implements KeyValueScanner { return retKey; } + @Override public boolean seek(Cell key) throws IOException { if (seekCount != null) seekCount.incrementAndGet(); @@ -205,6 +209,7 @@ public class StoreFileScanner implements KeyValueScanner { } } + @Override public boolean reseek(Cell key) throws IOException { if (seekCount != null) seekCount.incrementAndGet(); @@ -263,6 +268,7 @@ public class StoreFileScanner implements KeyValueScanner { return true; } + @Override public void close() { cur = null; if (closed) return; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java index 9b2a56aea7..12cc7bf8a6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java @@ -157,11 +157,13 @@ public class TimeRangeTracker implements Writable { return maximumTimestamp.get(); } + @Override public void write(final DataOutput out) throws IOException { out.writeLong(minimumTimestamp.get()); out.writeLong(maximumTimestamp.get()); } + @Override public void readFields(final DataInput in) throws IOException { this.minimumTimestamp.set(in.readLong()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java index 12a84ebef6..9d67af5130 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java +++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java @@ -26,9 +26,8 @@ import com.google.common.collect.Collections2; import java.util.ArrayList; import java.util.Collection; +import java.util.Objects; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.regionserver.Store; @@ -43,7 +42,7 @@ import org.apache.hadoop.util.StringUtils; @InterfaceAudience.LimitedPrivate({ "coprocessor" }) @InterfaceStability.Evolving public class CompactionRequest implements Comparable { - private static final Log LOG = LogFactory.getLog(CompactionRequest.class); + // was this compaction promoted to an off-peak private boolean isOffPeak = false; private enum DisplayCompactionType { MINOR, ALL_FILES, MAJOR } @@ -142,8 +141,13 @@ public class CompactionRequest implements Comparable { } @Override + public int hashCode() { + return System.identityHashCode(this); + } + + @Override public boolean equals(Object obj) { - return (this == obj); + return Objects.equals(this, obj); } public Collection getFiles() { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java index 2d8772c079..9e11ecfc75 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/RatioBasedCompactionPolicy.java @@ -207,6 +207,7 @@ public class RatioBasedCompactionPolicy extends SortedCompactionPolicy { * @param filesCompacting files being scheduled to compact. * @return true to schedule a request. */ + @Override public boolean needsCompaction(final Collection storeFiles, final List filesCompacting) { int numCandidates = storeFiles.size() - filesCompacting.size(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java index 77b0af8d48..4e8c35a5e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/SortedCompactionPolicy.java @@ -106,6 +106,7 @@ public abstract class SortedCompactionPolicy extends CompactionPolicy { * @param filesToCompact Files to compact. Can be null. * @return True if we should run a major compaction. 
*/ + @Override public abstract boolean shouldPerformMajorCompaction(final Collection filesToCompact) throws IOException; @@ -148,6 +149,7 @@ public abstract class SortedCompactionPolicy extends CompactionPolicy { * @param compactionSize Total size of some compaction * @return whether this should be a large or small compaction */ + @Override public boolean throttleCompaction(long compactionSize) { return compactionSize > comConf.getThrottlePoint(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java index 69786b620f..d64b7898f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java @@ -43,6 +43,8 @@ import org.apache.hadoop.hbase.util.ConfigUtil; * This is executed after receiving an OPEN RPC from the master or client. */ @InterfaceAudience.Private +@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Use of an atomic type both as monitor and condition variable is intended") public class OpenRegionHandler extends EventHandler { private static final Log LOG = LogFactory.getLog(OpenRegionHandler.class); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java index d5cf6bbd5a..593132fec1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java @@ -1323,7 +1323,7 @@ public class FSHLog implements WAL { rollWriterLock.unlock(); } try { - if (lowReplication || writer != null && writer.getLength() > logrollsize) { + if (lowReplication || (writer != null && writer.getLength() > logrollsize)) { requestLogRoll(lowReplication); } } catch (IOException e) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java index 69d1c59708..69c0db7776 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java @@ -81,6 +81,7 @@ class FSWALEntry extends Entry { } } + @Override public String toString() { return "sequence=" + this.sequence + ", " + super.toString(); }; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java index 9fd171f516..564317406a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -100,10 +100,12 @@ public class ProtobufLogReader extends ReaderBase { public long trailerSize() { if (trailerPresent) { // sizeof PB_WAL_COMPLETE_MAGIC + sizof trailerSize + trailer - final long calculatedSize = PB_WAL_COMPLETE_MAGIC.length + Bytes.SIZEOF_INT + trailer.getSerializedSize(); + final long calculatedSize = (long) PB_WAL_COMPLETE_MAGIC.length + Bytes.SIZEOF_INT + + trailer.getSerializedSize(); final long expectedSize = fileLength - walEditsStopOffset; if (expectedSize 
!= calculatedSize) { - LOG.warn("After parsing the trailer, we expect the total footer to be "+ expectedSize +" bytes, but we calculate it as being " + calculatedSize); + LOG.warn("After parsing the trailer, we expect the total footer to be "+ expectedSize + + " bytes, but we calculate it as being " + calculatedSize); } return expectedSize; } else { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.java index 25c2111e2e..deb9959516 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/RingBufferTruck.java @@ -109,6 +109,7 @@ class RingBufferTruck { * Factory for making a bunch of these. Needed by the ringbuffer/disruptor. */ final static EventFactory EVENT_FACTORY = new EventFactory() { + @Override public RingBufferTruck newInstance() { return new RingBufferTruck(); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java index 3278f0cfa5..10f2e7b5cf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java @@ -589,7 +589,7 @@ public class ReplicationSource extends Thread implements ReplicationSourceInterf if (replicationQueueInfo.isQueueRecovered() && getWorkerState() == WorkerState.FINISHED) { // use synchronize to make sure one last thread will clean the queue - synchronized (workerThreads) { + synchronized (this) { Threads.sleep(100);// wait a short while for other worker thread to fully exit boolean allOtherTaskDone = true; for (ReplicationSourceShipperThread worker : workerThreads.values()) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java index 8ce1437674..8660bd2bbd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java @@ -334,6 +334,7 @@ public class AuthenticationTokenSecretManager interrupt(); } + @Override public void run() { zkLeader.start(); zkLeader.waitToBecomeLeader(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java index db3caffb0b..4a34f5b808 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/ExpressionParser.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.security.visibility; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Stack; @@ -103,7 +104,8 @@ public class ExpressionParser { } index++; } while (index < endPos && !isEndOfLabel(exp[index])); - leafExp = new String(exp, labelOffset, index - labelOffset).trim(); + leafExp = new String(exp, labelOffset, index - labelOffset, + StandardCharsets.UTF_8).trim(); if 
(leafExp.isEmpty()) { throw new ParseException("Error parsing expression " + expS + " at column : " + index); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java index a15669f4e0..1ace4c01b1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/LeafExpressionNode.java @@ -58,6 +58,7 @@ public class LeafExpressionNode implements ExpressionNode { return true; } + @Override public LeafExpressionNode deepClone() { LeafExpressionNode clone = new LeafExpressionNode(this.identifier); return clone; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java index 4399ecc55f..e926045041 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/NonLeafExpressionNode.java @@ -91,6 +91,7 @@ public class NonLeafExpressionNode implements ExpressionNode { return this.op == Operator.NOT; } + @Override public NonLeafExpressionNode deepClone() { NonLeafExpressionNode clone = new NonLeafExpressionNode(this.op); for (ExpressionNode exp : this.childExps) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java index 1025ca9d88..ace34ae30c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/expression/Operator.java @@ -23,12 +23,13 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; public enum Operator { AND('&'), OR('|'), NOT('!'); - private char rep; + private final char rep; private Operator(char rep) { this.rep = rep; } + @Override public String toString() { return String.valueOf(this.rep); }; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java index 37a02bd444..082418954c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java @@ -573,6 +573,7 @@ public class ExportSnapshot extends Configured implements Tool { final List> files, final int ngroups) { // Sort files by size, from small to big Collections.sort(files, new Comparator>() { + @Override public int compare(Pair a, Pair b) { long r = a.getSecond() - b.getSecond(); return (r < 0) ? -1 : ((r > 0) ? 
1 : 0); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java index 47b3c3474e..75dac43f3b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java @@ -749,7 +749,7 @@ public class RestoreSnapshotHelper { public static void restoreSnapshotACL(SnapshotDescription snapshot, TableName newTableName, Configuration conf) throws IOException { - if (snapshot.hasUsersAndPermissions() && snapshot.getUsersAndPermissions() != null) { + if (snapshot.hasUsersAndPermissions()) { LOG.info("Restore snapshot acl to table. snapshot: " + snapshot + ", table: " + newTableName); ListMultimap perms = ProtobufUtil.toUserTablePermissions(snapshot.getUsersAndPermissions()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java index 06eb9ea23d..7e161cab10 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java @@ -382,7 +382,7 @@ public class FSTableDescriptors implements TableDescriptors { // Clean away old versions for (FileStatus file : status) { Path path = file.getPath(); - if (file != mostCurrent) { + if (!file.equals(mostCurrent)) { if (!fs.delete(file.getPath(), false)) { LOG.warn("Failed cleanup of " + path); } else { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java index 3e2d230263..5d850b01cd 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java @@ -1078,7 +1078,7 @@ public abstract class FSUtils { private static boolean isValidWALRootDir(Path walDir, final Configuration c) throws IOException { Path rootDir = FSUtils.getRootDir(c); - if (walDir != rootDir) { + if (!walDir.equals(rootDir)) { if (walDir.toString().startsWith(rootDir.toString() + "/")) { throw new IllegalStateException("Illegal WAL directory specified. 
" + "WAL directories are not permitted to be under the root directory if set."); @@ -1343,6 +1343,7 @@ public abstract class FSUtils { super(fs, HConstants.HBASE_NON_TABLE_DIRS); } + @Override protected boolean isValidName(final String name) { if (!super.isValidName(name)) return false; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java index e5e4b5ae4e..066f2b3130 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java @@ -574,10 +574,12 @@ public class HBaseFsck extends Configured implements Closeable { errors.print("Number of regions: " + status.getRegionsCount()); Set rits = status.getRegionsInTransition(); - errors.print("Number of regions in transition: " + rits.size()); - if (details) { - for (RegionState state: rits) { - errors.print(" " + state.toDescriptiveString()); + if (rits != null) { + errors.print("Number of regions in transition: " + rits.size()); + if (details) { + for (RegionState state: rits) { + errors.print(" " + state.toDescriptiveString()); + } } } @@ -3798,7 +3800,7 @@ public class HBaseFsck extends Configured implements Closeable { @Override public int hashCode() { int hash = Arrays.hashCode(getRegionName()); - hash ^= getRegionId(); + hash = (int) (hash ^ getRegionId()); hash ^= Arrays.hashCode(getStartKey()); hash ^= Arrays.hashCode(getEndKey()); hash ^= Boolean.valueOf(isOffline()).hashCode(); @@ -3806,7 +3808,7 @@ public class HBaseFsck extends Configured implements Closeable { if (regionServer != null) { hash ^= regionServer.hashCode(); } - hash ^= modTime; + hash = (int) (hash ^ modTime); return hash; } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java index 98ce80d219..f55f8cf641 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/IdReadWriteLock.java @@ -80,6 +80,9 @@ public class IdReadWriteLock { } @VisibleForTesting + @edu.umd.cs.findbugs.annotations.SuppressWarnings( + value="JLM_JSR166_UTILCONCURRENT_MONITORENTER", + justification="Synchronization on rwlock is intentional") public void waitForWaiters(long id, int numWaiters) throws InterruptedException { for (ReentrantReadWriteLock readWriteLock;;) { readWriteLock = lockPool.get(id); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java index 0739e91ba7..a05d7cfcd3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java @@ -16,12 +16,15 @@ */ package org.apache.hadoop.hbase.util; +import java.io.BufferedWriter; import java.io.Closeable; import java.io.IOException; +import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.management.ManagementFactory; import java.lang.reflect.Array; +import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.Set; @@ -371,7 +374,8 @@ public class JSONBean { * @throws MalformedObjectNameException */ public static void dumpAllBeans() throws IOException, MalformedObjectNameException { - try (PrintWriter writer = new PrintWriter(System.out)) { + try (PrintWriter writer = 
new PrintWriter(new BufferedWriter( + new OutputStreamWriter(System.out, StandardCharsets.UTF_8)))) { JSONBean dumper = new JSONBean(); try (JSONBean.Writer jsonBeanWriter = dumper.open(writer)) { MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java index d0f01f8918..89a1c568ef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java @@ -1086,6 +1086,8 @@ public class RegionSplitter { } @Override + @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH", + justification="Preconditions checks insure we are not going to dereference a null value") public byte[][] split(int numRegions) { Preconditions.checkArgument( Bytes.compareTo(lastRowBytes, firstRowBytes) > 0, diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DefaultWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DefaultWALProvider.java index 3fa38b93a1..53a9681e7b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DefaultWALProvider.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DefaultWALProvider.java @@ -18,9 +18,6 @@ */ package org.apache.hadoop.hbase.wal; -import java.io.Closeable; -import java.io.DataInput; -import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -382,7 +379,7 @@ public class DefaultWALProvider implements WALProvider { ProtobufLogWriter.class, Writer.class); Writer writer = null; try { - writer = logWriterClass.newInstance(); + writer = logWriterClass.getDeclaredConstructor().newInstance(); writer.init(fs, path, conf, overwritable); return writer; } catch (Exception e) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java index 9e90a0c3f9..2e34b643db 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WAL.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.Map; import java.util.Set; -import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; @@ -38,7 +36,6 @@ import org.apache.hadoop.hbase.regionserver.wal.HLogKey; import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener; import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost; import org.apache.hadoop.hbase.regionserver.wal.WALEdit; -import org.apache.hadoop.hbase.util.Bytes; import com.google.common.annotations.VisibleForTesting; @@ -103,6 +100,7 @@ public interface WAL extends Closeable { * underlying resources after this call; i.e. filesystem based WALs can archive or * delete files. 
*/ + @Override void close() throws IOException; /** diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java index 5452742800..f5723a7785 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALFactory.java @@ -79,7 +79,8 @@ public class WALFactory { filesystem(DefaultWALProvider.class), multiwal(RegionGroupingProvider.class); - Class clazz; + final Class clazz; + Providers(Class clazz) { this.clazz = clazz; } @@ -142,17 +143,13 @@ public class WALFactory { List listeners, String providerId) throws IOException { LOG.info("Instantiating WALProvider of type " + clazz); try { - final WALProvider result = clazz.newInstance(); + final WALProvider result = clazz.getDeclaredConstructor().newInstance(); result.init(this, conf, listeners, providerId); return result; - } catch (InstantiationException exception) { - LOG.error("couldn't set up WALProvider, the configured class is " + clazz); - LOG.debug("Exception details for failure to load WALProvider.", exception); - throw new IOException("couldn't set up WALProvider", exception); - } catch (IllegalAccessException exception) { + } catch (Exception e) { LOG.error("couldn't set up WALProvider, the configured class is " + clazz); - LOG.debug("Exception details for failure to load WALProvider.", exception); - throw new IOException("couldn't set up WALProvider", exception); + LOG.debug("Exception details for failure to load WALProvider.", e); + throw new IOException("couldn't set up WALProvider", e); } } @@ -299,7 +296,7 @@ public class WALFactory { try { if (lrClass != ProtobufLogReader.class) { // User is overriding the WAL reader, let them. - reader = lrClass.newInstance(); + reader = lrClass.getDeclaredConstructor().newInstance(); reader.init(fs, path, conf, null); return reader; } else { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java index 5cc7567235..4eb79b32e9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALKey.java @@ -476,8 +476,8 @@ public class WALKey implements SequenceId, Comparable { @Override public int hashCode() { int result = Bytes.hashCode(this.encodedRegionName); - result ^= this.logSeqNum; - result ^= this.writeTime; + result = (int) (result ^ this.logSeqNum); + result = (int) (result ^ this.writeTime); return result; } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java index 005e948089..7c74649771 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java @@ -132,6 +132,8 @@ import com.google.protobuf.TextFormat; * region to replay on startup. Delete the old log files when finished. 
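The WALKey hashCode hunk above makes the long-to-int narrowing explicit: the compound form result ^= logSeqNum already behaves like result = (int) (result ^ logSeqNum), so the value is unchanged and only the hidden cast is surfaced (the HBaseFsck hashCode earlier got the same treatment). For additions such as the flush-delay computation in HRegionServer, the cast goes the other way, widening to long before the arithmetic so the sum cannot overflow as an int. A small demonstration of both:

    public final class NarrowingDemo {
      public static void main(String[] args) {
        long logSeqNum = 0x1_0000_0001L;
        int seed = 42;

        // Compound assignment hides a narrowing cast back to int...
        int compound = seed;
        compound ^= logSeqNum;
        // ...so the explicit form from the patch produces exactly the same value.
        int explicit = (int) (seed ^ logSeqNum);
        System.out.println(compound == explicit); // true

        // For sums, widening first is what matters: int math overflows, long math does not.
        int delay = Integer.MAX_VALUE;
        long intMath  = delay + 1;        // -2147483648 (overflowed, then widened)
        long longMath = (long) delay + 1; //  2147483648
        System.out.println(intMath + " vs " + longMath);
      }
    }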
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
index 005e948089..7c74649771 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java
@@ -132,6 +132,8 @@ import com.google.protobuf.TextFormat;
 * region to replay on startup. Delete the old log files when finished.
 */
@InterfaceAudience.Private
+@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="JLM_JSR166_UTILCONCURRENT_MONITORENTER",
+    justification="Synchronization on concurrent map is intended")
public class WALSplitter {

  private static final Log LOG = LogFactory.getLog(WALSplitter.class);
@@ -1145,7 +1147,7 @@ public class WALSplitter {
     protected PipelineController controller;
     protected EntryBuffers entryBuffers;

-    protected Map writers = Collections
+    protected final Map writers = Collections
         .synchronizedMap(new TreeMap(Bytes.BYTES_COMPARATOR));;
     protected final Map regionMaximumEditLogSeqNum = Collections
@@ -1710,21 +1712,21 @@
     private long waitRegionOnlineTimeOut;
     private final Set recoveredRegions = Collections.synchronizedSet(new HashSet());
-    private final Map writers =
+    private final Map rsWriters =
         new ConcurrentHashMap();
     // online encoded region name -> region location map
     private final Map onlineRegions =
         new ConcurrentHashMap();

-    private Map tableNameToHConnectionMap = Collections
+    private final Map tableNameToHConnectionMap = Collections
         .synchronizedMap(new TreeMap());
     /**
      * Map key -> value layout
      * : -> Queue
      */
-    private Map>> serverToBufferQueueMap =
+    private final Map>> serverToBufferQueueMap =
         new ConcurrentHashMap>>();
-    private List thrown = new ArrayList();
+    private final List thrown = new ArrayList();
     // The following sink is used in distrubitedLogReplay mode for entries of regions in a disabling
     // table. It's a limitation of distributedLogReplay. Because log replay needs a region is
@@ -2124,7 +2126,7 @@ public class WALSplitter {

     @Override
     int getNumOpenWriters() {
-      return this.writers.size() + this.logRecoveredEditsOutputSink.getNumOpenWriters();
+      return this.rsWriters.size() + this.logRecoveredEditsOutputSink.getNumOpenWriters();
     }

     private List closeRegionServerWriters() throws IOException {
@@ -2146,8 +2148,8 @@
           }
         }
       } finally {
-        synchronized (writers) {
-          for (Map.Entry entry : writers.entrySet()) {
+        synchronized (rsWriters) {
+          for (Map.Entry entry : rsWriters.entrySet()) {
             String locationKey = entry.getKey();
             RegionServerWriter tmpW = entry.getValue();
             try {
@@ -2182,8 +2184,8 @@
     @Override
     public Map getOutputCounts() {
       TreeMap ret = new TreeMap(Bytes.BYTES_COMPARATOR);
-      synchronized (writers) {
-        for (Map.Entry entry : writers.entrySet()) {
+      synchronized (rsWriters) {
+        for (Map.Entry entry : rsWriters.entrySet()) {
           ret.put(Bytes.toBytes(entry.getKey()), entry.getValue().editsWritten);
         }
       }
@@ -2201,7 +2203,7 @@
      * @return null if this region shouldn't output any logs
      */
     private RegionServerWriter getRegionServerWriter(String loc) throws IOException {
-      RegionServerWriter ret = writers.get(loc);
+      RegionServerWriter ret = rsWriters.get(loc);
       if (ret != null) {
         return ret;
       }
@@ -2212,11 +2214,11 @@
       }
       HConnection hconn = getConnectionByTableName(tableName);
-      synchronized (writers) {
-        ret = writers.get(loc);
+      synchronized (rsWriters) {
+        ret = rsWriters.get(loc);
         if (ret == null) {
           ret = new RegionServerWriter(conf, tableName, hconn);
-          writers.put(loc, ret);
+          rsWriters.put(loc, ret);
         }
       }
       return ret;
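WALSplitter keeps its check-then-put under a lock on the writer map itself and suppresses the resulting JLM_JSR166_UTILCONCURRENT_MONITORENTER warning rather than restructuring the code; the warning exists because locking a java.util.concurrent map only serialises that block, it does not change the map's own concurrency control. A rough sketch of both approaches, with hypothetical names rather than the real WALSplitter types:

```java
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class WriterCache {
  // Stand-in for a per-regionserver writer; not an HBase class.
  static final class Writer {
    final String location;
    Writer(String location) { this.location = location; }
  }

  private final ConcurrentMap<String, Writer> writers = new ConcurrentHashMap<>();

  // Check-then-put guarded by a monitor on the map, the shape FindBugs flags.
  Writer getOrCreateLocked(String loc) {
    synchronized (writers) {
      Writer w = writers.get(loc);
      if (w == null) {
        w = new Writer(loc);
        writers.put(loc, w);
      }
      return w;
    }
  }

  // Java 8+ alternative giving the same "create at most once" guarantee without the lock.
  Writer getOrCreateAtomic(String loc) {
    return writers.computeIfAbsent(loc, Writer::new);
  }

  public static void main(String[] args) {
    WriterCache cache = new WriterCache();
    System.out.println(cache.getOrCreateLocked("rs1,16020").location);
    System.out.println(cache.getOrCreateAtomic("rs2,16020").location);
  }
}
```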
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
@@ -28,6 +28,7 @@ import java.io.Reader;
 import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
@@ -405,7 +406,7 @@ public class MiniZooKeeperCluster {
     Socket sock = new Socket("localhost", port);
     try {
       OutputStream outstream = sock.getOutputStream();
-      outstream.write("stat".getBytes());
+      outstream.write("stat".getBytes(StandardCharsets.UTF_8));
       outstream.flush();
     } finally {
       sock.close();
@@ -435,10 +436,10 @@
     BufferedReader reader = null;
     try {
       OutputStream outstream = sock.getOutputStream();
-      outstream.write("stat".getBytes());
+      outstream.write("stat".getBytes(StandardCharsets.UTF_8));
       outstream.flush();

-      Reader isr = new InputStreamReader(sock.getInputStream());
+      Reader isr = new InputStreamReader(sock.getInputStream(), StandardCharsets.UTF_8);
       reader = new BufferedReader(isr);
       String line = reader.readLine();
       if (line != null && line.startsWith("Zookeeper version:")) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
index 1d2f39477f..0fe60e7ed0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
@@ -51,7 +51,7 @@ import org.apache.zookeeper.KeeperException;
 @InterfaceAudience.Private
 public class RegionServerTracker extends ZooKeeperListener {
   private static final Log LOG = LogFactory.getLog(RegionServerTracker.class);
-  private NavigableMap regionServers =
+  private final NavigableMap regionServers =
     new TreeMap();
   private ServerManager serverManager;
   private MasterServices server;
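The MiniZooKeeperCluster hunks pin the charset when writing ZooKeeper's `stat` four-letter command and when reading the reply, instead of relying on the platform default encoding. A self-contained sketch of that probe, assuming a ZooKeeper server is reachable at the given host and port:

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.Socket;
import java.nio.charset.StandardCharsets;

public class ZkStatProbe {
  // Send the "stat" four-letter word and return the first response line.
  // Explicit charsets keep the bytes on the wire identical on every platform,
  // which is the point of the getBytes()/InputStreamReader changes above.
  static String probe(String host, int port) throws Exception {
    try (Socket sock = new Socket(host, port)) {
      OutputStream out = sock.getOutputStream();
      out.write("stat".getBytes(StandardCharsets.UTF_8));
      out.flush();
      BufferedReader reader = new BufferedReader(
          new InputStreamReader(sock.getInputStream(), StandardCharsets.UTF_8));
      return reader.readLine(); // typically starts with "Zookeeper version:"
    }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(probe("localhost", 2181)); // assumes a local ZooKeeper
  }
}
```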

diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
index dceed8ee1d..ad7996b5ad 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
@@ -112,30 +112,32 @@
       Shared Storefile Size
       Archived Storefile Size
-    <%for (SnapshotDescription snapshotDesc : snapshots) { %>
-    <%
-      TableName snapshotTable = TableName.valueOf(snapshotDesc.getTable());
-      SnapshotInfo.SnapshotStats stats = SnapshotInfo.getSnapshotStats(master.getConfiguration(),
-        snapshotDesc, filesMap);
-      totalUnsharedArchivedSize += stats.getNonSharedArchivedStoreFilesSize();
-      tableExists = admin.tableExists(snapshotTable);
-    %>
-      <%= snapshotDesc.getName() %>
-      <% if (tableExists) { %>
-        <%= snapshotTable.getNameAsString() %>
-      <% } else { %>
-        <%= snapshotTable.getNameAsString() %>
-      <% } %>
-      <%= new Date(snapshotDesc.getCreationTime()) %>
-      <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %>
-      <%= StringUtils.humanReadableInt(stats.getArchivedStoreFileSize()) %>
-        (<%= StringUtils.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)
-    <% } %>
+    <% if (snapshots != null) { %>
+      <% for (SnapshotDescription snapshotDesc : snapshots) { %>
+      <%
+        TableName snapshotTable = TableName.valueOf(snapshotDesc.getTable());
+        SnapshotInfo.SnapshotStats stats = SnapshotInfo.getSnapshotStats(master.getConfiguration(),
+          snapshotDesc, filesMap);
+        totalUnsharedArchivedSize += stats.getNonSharedArchivedStoreFilesSize();
+        tableExists = admin.tableExists(snapshotTable);
+      %>
+        <%= snapshotDesc.getName() %>
+        <% if (tableExists) { %>
+          <%= snapshotTable.getNameAsString() %>
+        <% } else { %>
+          <%= snapshotTable.getNameAsString() %>
+        <% } %>
+        <%= new Date(snapshotDesc.getCreationTime()) %>
+        <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %>
+        <%= StringUtils.humanReadableInt(stats.getArchivedStoreFileSize()) %>
+          (<%= StringUtils.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)
+      <% } %>
+    <% } %>
     <%= snapshots.size() %> snapshot(s) in set.

     Total Storefile Size: <%= StringUtils.humanReadableInt(totalSize) %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 19e66e1b35..86a5a76337 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -22,6 +22,7 @@
   import="com.google.protobuf.ByteString"
   import="java.net.URLEncoder"
   import="java.util.ArrayList"
+  import="java.util.HashMap"
   import="java.util.TreeMap"
   import="java.util.List"
   import="java.util.LinkedHashMap"
@@ -252,8 +253,14 @@ if ( fqtn != null ) {
   }
 %>
+<%
+  String metaLocationString = metaLocation != null ?
+      StringEscapeUtils.escapeHtml(metaLocation.getHostname().toString()) +
+      ":" + master.getRegionServerInfoPort(metaLocation) :
+      "(null)";
+%>
     <%= escapeXml(meta.getRegionNameAsString()) %>
-    <%= StringEscapeUtils.escapeHtml(metaLocation.getHostname().toString()) + ":" + master.getRegionServerInfoPort(metaLocation) %>
+    <%= metaLocationString %>
     <%= readReq%>
     <%= writeReq%>
     <%= fileSize%>
@@ -355,8 +362,11 @@ if ( fqtn != null ) {
   String urlRegionServer = null;
   Map regDistribution = new TreeMap();
   Map primaryRegDistribution = new TreeMap();
-  Map regions = table.getRegionLocations();
   Map regionsToLoad = new LinkedHashMap();
+  Map regions = table.getRegionLocations();
+  if (regions == null) {
+    regions = new HashMap();
+  }
   for (Map.Entry hriEntry : regions.entrySet()) {
     HRegionInfo regionInfo = hriEntry.getKey();
     ServerName addr = hriEntry.getValue();
@@ -811,7 +821,7 @@
 var showWhole='<%= showWhole %>';
 if(showWhole=='true')document.getElementById("showWhole").checked=true;

 function reloadAsSort(){
-  var url="?name="+'<%= URLEncoder.encode(fqtn) %>';
+  var url="?name="+'<%= fqtn != null ? URLEncoder.encode(fqtn) : "" %>';
   if(document.getElementById("sel").selectedIndex>0){
     url=url+"&sort="+document.getElementById("sel").value;
   }
diff --git a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
index cd35ad1fcf..04d54a59dd 100644
--- a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
@@ -37,7 +37,7 @@
   Configuration conf = rs.getConfiguration();

   Region region = rs.getFromOnlineRegions(regionName);
-  String displayName = region.getRegionInfo().getRegionNameAsString();
+  String displayName = region != null ? region.getRegionInfo().getRegionNameAsString() : "(null)";
 %>
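The three JSP changes above share one idea: values that can legitimately be null (`snapshots`, `metaLocation`, `regions`, `region`, `fqtn`) are either guarded before use or rendered as a harmless placeholder instead of throwing a NullPointerException while the page builds. A compact sketch of that defensive pattern with illustrative names; the patch substitutes a new `HashMap` where this sketch uses an immutable empty map:

```java
import java.util.Collections;
import java.util.Map;

public class NullSafeRendering {
  // Functional stand-in for a region object; not the JSP's actual types.
  interface RegionLike {
    String getRegionNameAsString();
  }

  // Render "(null)" instead of dereferencing a missing region.
  static String displayName(RegionLike region) {
    return region != null ? region.getRegionNameAsString() : "(null)";
  }

  // Substitute an empty map so iteration code needs no special case.
  static <K, V> Map<K, V> orEmpty(Map<K, V> regions) {
    return regions != null ? regions : Collections.<K, V>emptyMap();
  }

  public static void main(String[] args) {
    System.out.println(displayName(null));                  // (null)
    System.out.println(orEmpty(null).size());               // 0
    System.out.println(displayName(() -> "hbase:meta,,1")); // hbase:meta,,1
  }
}
```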