diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
index a8ece39584..cdab4a9157 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.backup.master;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
@@ -116,7 +117,7 @@ public class BackupLogCleaner extends BaseLogCleanerDelegate {
     } catch (IOException e) {
       LOG.error("Failed to get backup system table table, therefore will keep all files", e);
       // nothing to delete
-      return new ArrayList();
+      return Collections.emptyList();
     }
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index f90d9dddc6..fd6c4b7c73 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -432,7 +432,7 @@ public final class ProtobufUtil {
    */
   public static List<TableDescriptor> toTableDescriptorList(GetTableDescriptorsResponse proto) {
     if (proto == null) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     return proto.getTableSchemaList().stream().map(ProtobufUtil::toTableDescriptor)
       .collect(Collectors.toList());
@@ -445,7 +445,9 @@ public final class ProtobufUtil {
    */
   public static List<TableDescriptor> toTableDescriptorList(
       ListTableDescriptorsByNamespaceResponse proto) {
-    if (proto == null) return new ArrayList<>();
+    if (proto == null) {
+      return Collections.emptyList();
+    }
     return proto.getTableSchemaList().stream().map(ProtobufUtil::toTableDescriptor)
       .collect(Collectors.toList());
   }
@@ -2259,7 +2261,7 @@ public final class ProtobufUtil {
   public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
     if (tableNamesList == null) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     return tableNamesList.stream().map(ProtobufUtil::toTableName).collect(Collectors.toList());
   }
 
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
index 90f3bf38e7..1d92e95a47 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ByteRangeUtils.java
@@ -22,6 +22,8 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 
 import org.apache.yetus.audience.InterfaceAudience;
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
index 3033f18f3a..c5d8c47df7 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RESTServer.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.rest;
 
 import java.lang.management.ManagementFactory;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.EnumSet;
@@ -116,7 +117,7 @@ public class RESTServer implements Constants {
       String name, String defaultValue) {
     String valueString = conf.get(name, defaultValue);
     if (valueString == null) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     return new ArrayList<>(StringUtils.getTrimmedStringCollection(valueString));
   }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 3b7e8fffa5..50040669cd 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1631,9 +1631,10 @@ public class HMaster extends HRegionServer implements MasterServices {
     }
 
     ServerName dest;
-    List<ServerName> exclude = hri.getTable().isSystemTable() ? assignmentManager.getExcludedServersForSystemTable() : new ArrayList<>(1);
-    if (destServerName != null && exclude.contains(ServerName.valueOf(Bytes.toString(destServerName)))) {
+    Collection<ServerName> exclude = hri.getTable().isSystemTable() ?
+      assignmentManager.getExcludedServersForSystemTable() : new ArrayList<>(1);
+    if (destServerName != null &&
+      exclude.contains(ServerName.valueOf(Bytes.toString(destServerName)))) {
       LOG.info(
         Bytes.toString(encodedRegionName) + " can not move to " + Bytes.toString(destServerName)
           + " because the server is in exclude list");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
index 3db60335d8..6362ee1dac 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
@@ -23,6 +23,7 @@ import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
 import java.io.IOException;
 import java.net.InetAddress;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -985,7 +986,8 @@ public class ServerManager {
    * the draining or dying servers.
    * @param serversToExclude can be null if there is no server to exclude
    */
-  public List<ServerName> createDestinationServersList(final List<ServerName> serversToExclude){
+  public List<ServerName> createDestinationServersList(
+      final Collection<ServerName> serversToExclude){
     final List<ServerName> destServers = getOnlineServersList();
 
     if (serversToExclude != null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
index 1b8e757754..787b020d1c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
@@ -491,17 +491,14 @@ public class AssignmentManager implements ServerListener {
               // handling.
               continue;
             }
-            List<RegionInfo> regionsShouldMove = getCarryingSystemTables(server);
-            if (!regionsShouldMove.isEmpty()) {
-              for (RegionInfo regionInfo : regionsShouldMove) {
-                // null value for dest forces destination server to be selected by balancer
-                RegionPlan plan = new RegionPlan(regionInfo, server, null);
-                if (regionInfo.isMetaRegion()) {
-                  // Must move meta region first.
-                  moveAsync(plan);
-                } else {
-                  plans.add(plan);
-                }
+            for (RegionInfo regionInfo : getCarryingSystemTables(server)) {
+              // null value for dest forces destination server to be selected by balancer
+              RegionPlan plan = new RegionPlan(regionInfo, server, null);
+              if (regionInfo.isMetaRegion()) {
+                // Must move meta region first.
+                moveAsync(plan);
+              } else {
+                plans.add(plan);
+              }
             }
             for (RegionPlan plan : plans) {
@@ -515,10 +512,10 @@ public class AssignmentManager implements ServerListener {
     }).start();
   }
 
-  private List<RegionInfo> getCarryingSystemTables(ServerName serverName) {
+  private Iterable<RegionInfo> getCarryingSystemTables(ServerName serverName) {
     Set<RegionStateNode> regions = this.getRegionStates().getServerNode(serverName).getRegions();
     if (regions == null) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     return regions.stream()
       .map(RegionStateNode::getRegionInfo)
@@ -740,7 +737,7 @@ public class AssignmentManager implements ServerListener {
 
   public MoveRegionProcedure createMoveRegionProcedure(final RegionPlan plan) {
     if (plan.getRegionInfo().getTable().isSystemTable()) {
-      List<ServerName> exclude = getExcludedServersForSystemTable();
+      Collection<ServerName> exclude = getExcludedServersForSystemTable();
       if (plan.getDestination() != null && exclude.contains(plan.getDestination())) {
         try {
           LOG.info("Can not move " + plan.getRegionInfo() + " to " + plan.getDestination()
@@ -1749,7 +1746,7 @@ public class AssignmentManager implements ServerListener {
     if (!sysRRList.isEmpty()) {
       // system table regions requiring reassignment are present, get region servers
       // not available for system table regions
-      final List<ServerName> excludeServers = getExcludedServersForSystemTable();
+      final Collection<ServerName> excludeServers = getExcludedServersForSystemTable();
       List<ServerName> serversForSysTables = servers.stream()
           .filter(s -> !excludeServers.contains(s)).collect(Collectors.toList());
       if (serversForSysTables.isEmpty()) {
@@ -1847,13 +1844,13 @@
    * Get a list of servers that this region can not assign to.
    * For system table, we must assign them to a server with highest version.
    */
-  public List<ServerName> getExcludedServersForSystemTable() {
+  public Collection<ServerName> getExcludedServersForSystemTable() {
     List<Pair<ServerName, String>> serverList = master.getServerManager().getOnlineServersList()
         .stream()
         .map((s)->new Pair<>(s, master.getRegionServerVersion(s)))
         .collect(Collectors.toList());
     if (serverList.isEmpty()) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     String highestVersion = Collections.max(serverList,
       (o1, o2) -> VersionInfo.compareVersion(o1.getSecond(), o2.getSecond())).getSecond();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
index a8dd9aeb7f..b9d62eb118 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.master.balancer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.Deque;
 import java.util.HashMap;
@@ -71,7 +72,7 @@ public abstract class BaseLoadBalancer implements LoadBalancer {
   protected static final int MIN_SERVER_BALANCE = 2;
   private volatile boolean stopped = false;
 
-  private static final List<RegionInfo> EMPTY_REGION_LIST = new ArrayList<>(0);
+  private static final List<RegionInfo> EMPTY_REGION_LIST = Collections.emptyList();
 
   static final Predicate<ServerLoad> IDLE_SERVER_PREDICATOR
     = load -> load.getNumberOfRegions() == 0;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java
index da691b7960..e33a9f2f15 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
-import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.UUID;
@@ -57,7 +57,7 @@ implements RowProcessor<S,T> {
 
   @Override
   public List<UUID> getClusterIds() {
-    return new ArrayList<>();
+    return Collections.emptyList();
   }
 
   @Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
index ecd980a9c8..4201ade2bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFileManager.java
@@ -996,7 +996,9 @@ public class StripeStoreFileManager
 
   @Override
   public List<byte[]> getStripeBoundaries() {
-    if (this.state.stripeFiles.isEmpty()) return new ArrayList<>();
+    if (this.state.stripeFiles.isEmpty()) {
+      return Collections.emptyList();
+    }
     ArrayList<byte[]> result = new ArrayList<>(this.state.stripeEndRows.length + 2);
     result.add(OPEN_KEY);
     Collections.addAll(result, this.state.stripeEndRows);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 31208c1361..46e1914a2f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -27,6 +27,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -1227,8 +1228,9 @@ public final class Canary implements Tool {
    * Canary entry point for specified table.
    * @throws Exception
    */
-  private static List<Future<Void>> sniff(final Admin admin, final Sink sink, String tableName,
-      ExecutorService executor, TaskType taskType, boolean rawScanEnabled, LongAdder readLatency) throws Exception {
+  private static Collection<Future<Void>> sniff(final Admin admin, final Sink sink,
+      String tableName, ExecutorService executor, TaskType taskType, boolean rawScanEnabled,
+      LongAdder readLatency) throws Exception {
     if (LOG.isDebugEnabled()) {
       LOG.debug(String.format("checking table is enabled and getting table descriptor for table %s",
         tableName));
@@ -1245,7 +1247,7 @@ public final class Canary implements Tool {
 
   /*
    * Loops over regions that owns this table, and output some information about the state.
    */
-  private static List<Future<Void>> sniff(final Admin admin, final Sink sink,
+  private static Collection<Future<Void>> sniff(final Admin admin, final Sink sink,
       HTableDescriptor tableDesc, ExecutorService executor, TaskType taskType,
       boolean rawScanEnabled, LongAdder rwLatency) throws Exception {
@@ -1257,7 +1259,7 @@ public final class Canary implements Tool {
     try {
       table = admin.getConnection().getTable(tableDesc.getTableName());
     } catch (TableNotFoundException e) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     } finally {
       if (table !=null) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index c0b72aa40f..0e8c77a4b0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import java.util.Collection;
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;
@@ -1018,7 +1019,7 @@ public abstract class FSUtils extends CommonFSUtils {
     // assumes we are in a table dir.
     List<FileStatus> rds = listStatusWithStatusFilter(fs, tableDir, new RegionDirFilter(fs));
     if (rds == null) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     List<Path> regionDirs = new ArrayList<>(rds.size());
     for (FileStatus rdfs: rds) {
@@ -1078,10 +1079,11 @@ public abstract class FSUtils extends CommonFSUtils {
     return familyDirs;
   }
 
-  public static List<Path> getReferenceFilePaths(final FileSystem fs, final Path familyDir) throws IOException {
+  public static Collection<Path> getReferenceFilePaths(final FileSystem fs, final Path familyDir)
+      throws IOException {
     List<FileStatus> fds = listStatusWithStatusFilter(fs, familyDir, new ReferenceFileFilter(fs));
     if (fds == null) {
-      return new ArrayList<>();
+      return Collections.emptyList();
     }
     List<Path> referenceFiles = new ArrayList<>(fds.size());
     for (FileStatus fdfs: fds) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index 81ff495822..4be368c32f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -2432,7 +2432,7 @@ public class HBaseFsck extends Configured implements Closeable {
     FileSystem fs = regionDir.getFileSystem(getConf());
     List<Path> familyDirs = FSUtils.getFamilyDirs(fs, regionDir);
     for (Path familyDir : familyDirs) {
-      List<Path> referenceFilePaths = FSUtils.getReferenceFilePaths(fs, familyDir);
+      Iterable<Path> referenceFilePaths = FSUtils.getReferenceFilePaths(fs, familyDir);
       for (Path referenceFilePath : referenceFilePaths) {
         Path parentRegionDir =
             StoreFileInfo.getReferredToFile(referenceFilePath).getParent().getParent();
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 583a9e9bb8..c070165213 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -1521,7 +1521,7 @@ public class ThriftServerRunner implements Runnable {
       try {
        results = resultScannerWrapper.getScanner().next(nbRows);
        if (null == results) {
-         return new ArrayList<>();
+         return Collections.emptyList();
        }
      } catch (IOException e) {
        LOG.warn(e.getMessage(), e);
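
Note (illustration only, not part of the patch): the patch applies the same two idioms throughout. Empty or error paths return the immutable Collections.emptyList() singleton instead of allocating a new ArrayList, and signatures are widened from List to Collection or Iterable where callers only read or iterate. A minimal sketch of the pattern; the class and method names below are made up for illustration:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.Collections;
    import java.util.List;

    public class EmptyCollectionsSketch {
      // Empty/absent path: return the shared immutable empty list instead of
      // allocating a fresh ArrayList. Callers must not add to the result.
      static List<String> trimmedValuesOrEmpty(String csv) {
        if (csv == null) {
          return Collections.emptyList();
        }
        List<String> out = new ArrayList<>();
        for (String part : csv.split(",")) {
          out.add(part.trim());
        }
        return out;
      }

      // Widened parameter: callers can pass a List, a Set, or any other
      // Collection, mirroring createDestinationServersList in the patch.
      static int countNonNull(Collection<String> items) {
        int n = 0;
        for (String item : items) {
          if (item != null) {
            n++;
          }
        }
        return n;
      }
    }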