diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 17ca37a3fc..b7e817711d 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -1176,13 +1176,14 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     boolean hasConfigKeys = false;
 
     // print all reserved keys first
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (!RESERVED_KEYWORDS.contains(k)) {
         hasConfigKeys = true;
         continue;
       }
       String key = Bytes.toString(k.get());
-      String value = Bytes.toStringBinary(values.get(k).get());
+      String value = Bytes.toStringBinary(entry.getValue().get());
       if (printDefaults
           || !DEFAULT_VALUES.containsKey(key)
           || !DEFAULT_VALUES.get(key).equalsIgnoreCase(value)) {
@@ -1199,12 +1200,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
       s.append(HConstants.METADATA).append(" => ");
       s.append('{');
       boolean printComma = false;
-      for (ImmutableBytesWritable k : values.keySet()) {
+      for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+        ImmutableBytesWritable k = entry.getKey();
         if (RESERVED_KEYWORDS.contains(k)) {
           continue;
         }
         String key = Bytes.toString(k.get());
-        String value = Bytes.toStringBinary(values.get(k).get());
+        String value = Bytes.toStringBinary(entry.getValue().get());
         if (printComma) {
           s.append(", ");
         }
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index b1e62f0e20..0eb0d20fb2 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -949,7 +949,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
     // step 1: set partitioning and pruning
     Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
     Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
-    for (ImmutableBytesWritable k : values.keySet()) {
+    for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry : values.entrySet()) {
+      ImmutableBytesWritable k = entry.getKey();
       if (k == null || k.get() == null) continue;
       String key = Bytes.toString(k.get());
       // in this section, print out reserved keywords + coprocessor info
@@ -958,7 +959,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
         continue;
       }
       // only print out IS_ROOT/IS_META if true
-      String value = Bytes.toString(values.get(k).get());
+      String value = Bytes.toString(entry.getValue().get());
       if (key.equalsIgnoreCase(IS_ROOT) || key.equalsIgnoreCase(IS_META)) {
         if (Boolean.valueOf(value) == false) continue;
       }
@@ -1166,8 +1167,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
       result = families.size() - other.families.size();
     }
     if (result == 0 && families.size() != other.families.size()) {
-      result = Integer.valueOf(families.size()).compareTo(
-          Integer.valueOf(other.families.size()));
+      result = Integer.compare(families.size(), other.families.size());
     }
     if (result == 0) {
       for (Iterator<HColumnDescriptor> it = families.values().iterator(),
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
index 44e8322a96..6420e37c29 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
@@ -85,6 +85,8 @@ public class AsyncRpcChannel {
 
   private static final int MAX_SASL_RETRIES = 5;
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MS_MUTABLE_COLLECTION_PKGPROTECT",
+      justification="the rest of the system which live in the different package can use")
   protected final static Map<AuthenticationProtos.TokenIdentifier.Kind,
       TokenSelector<? extends TokenIdentifier>> tokenHandlers = new HashMap<>();
 
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
index 26a71a3459..0260176163 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
@@ -119,6 +119,8 @@ public class RpcClientImpl extends AbstractRpcClient {
   protected final SocketFactory socketFactory;           // how to create sockets
 
+  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="MS_MUTABLE_COLLECTION_PKGPROTECT",
+      justification="the rest of the system which live in the different package can use")
   protected final static Map<AuthenticationProtos.TokenIdentifier.Kind,
       TokenSelector<? extends TokenIdentifier>> tokenHandlers = new HashMap<>();
   public static final Map<String, String> SASL_PROPS = new TreeMap<String, String>();
   public static final int SWITCH_TO_SIMPLE_AUTH = -88;
diff --git hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index 9d0319b7d2..0bb6d61d54 100644
--- hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
+++ hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
@@ -209,7 +209,7 @@ public class PoolMap<K, V> implements Map<K, V> {
         }
       }
     }
-    return null;
+    return entries;
   }
 
   protected interface Pool<R> {
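
For reference, the keySet()-to-entrySet() rewrite applied in HColumnDescriptor and HTableDescriptor above removes one extra map lookup per key, which is the inefficiency FindBugs reports as WMI_WRONG_MAP_ITERATOR. A minimal standalone sketch of the same rewrite follows; the class name and map contents are illustrative only, not taken from the patch:

    import java.util.HashMap;
    import java.util.Map;

    public class EntrySetExample {
      public static void main(String[] args) {
        Map<String, String> values = new HashMap<>();
        values.put("BLOCKSIZE", "65536");
        values.put("TTL", "FOREVER");

        // Before: each pass pays for a second hash lookup via values.get(k).
        for (String k : values.keySet()) {
          System.out.println(k + " => " + values.get(k));
        }

        // After: entrySet() hands back key and value together, so no extra
        // lookup is needed; this mirrors the change made in the patch.
        for (Map.Entry<String, String> entry : values.entrySet()) {
          System.out.println(entry.getKey() + " => " + entry.getValue());
        }
      }
    }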