From defa5c140c90ee459f017d5a6d8d35acf0264b60 Mon Sep 17 00:00:00 2001
From: John Leach
Date: Tue, 27 Jun 2017 13:45:30 -0500
Subject: [PATCH] HBASE-18279 Fix Manual Array to Collection Copy

---
 .../java/org/apache/hadoop/hbase/client/Result.java  |  4 +---
 .../apache/hadoop/hbase/util/AbstractHBaseTool.java  |  9 ++-------
 .../hbase/rest/filter/RestCsrfPreventionFilter.java  |  9 ++-------
 .../hadoop/hbase/backup/impl/BackupSystemTable.java  | 19 +++----------------
 .../hadoop/hbase/io/hfile/BlockCachesIterator.java   | 12 +++++++-----
 .../hadoop/hbase/mapreduce/HFileInputFormat.java     |  5 ++---
 .../apache/hadoop/hbase/regionserver/wal/FSHLog.java |  4 +---
 7 files changed, 18 insertions(+), 44 deletions(-)

diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index 94e1b908aa..7eb0406e6c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -837,9 +837,7 @@ public class Result implements CellScannable, CellScanner {
       }
       prevRow = currentRow;
       stale = stale || r.isStale();
-      for (Cell c : r.rawCells()) {
-        cells.add(c);
-      }
+      Collections.addAll(cells, r.rawCells());
     }

     return Result.create(cells, null, stale);
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
index 29d10ae20c..512a33fcd9 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
@@ -17,10 +17,7 @@
 package org.apache.hadoop.hbase.util;

 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
+import java.util.*;

 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
@@ -122,9 +119,7 @@ public abstract class AbstractHBaseTool implements Tool, Configurable {

     CommandLine cmd;
     List<String> argsList = new ArrayList<>(args.length);
-    for (String arg : args) {
-      argsList.add(arg);
-    }
+    Collections.addAll(argsList, args);
     // For backward compatibility of args which can't be parsed as Option. See javadoc for
     // processOldArgs(..)
     processOldArgs(argsList);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
index 013da3fa15..c5e880c0dd 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -18,10 +18,7 @@
 package org.apache.hadoop.hbase.rest.filter;

 import java.io.IOException;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

@@ -101,9 +98,7 @@ public class RestCsrfPreventionFilter implements Filter {
   void parseMethodsToIgnore(String mti) {
     String[] methods = mti.split(",");
     methodsToIgnore = new HashSet<>();
-    for (int i = 0; i < methods.length; i++) {
-      methodsToIgnore.add(methods[i]);
-    }
+    Collections.addAll(methodsToIgnore, methods);
   }

   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
index e5a3daace2..06503a24a6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
@@ -19,17 +19,8 @@ package org.apache.hadoop.hbase.backup.impl;

 import java.io.Closeable;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;

 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -1230,9 +1221,7 @@ public final class BackupSystemTable implements Closeable {
   private String[] merge(String[] tables, String[] newTables) {
     List list = new ArrayList();
     // Add all from tables
-    for (String t : tables) {
-      list.add(t);
-    }
+    Collections.addAll(list, tables);
     for (String nt : newTables) {
       if (list.contains(nt)) continue;
       list.add(nt);
@@ -1289,9 +1278,7 @@
   private String[] disjoin(String[] tables, String[] toRemove) {
     List list = new ArrayList();
     // Add all from tables
-    for (String t : tables) {
-      list.add(t);
-    }
+    Collections.addAll(list, tables);
     for (String nt : toRemove) {
       if (list.contains(nt)) {
         list.remove(nt);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java
index 8cd9e57d7c..3cd44a0cd2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCachesIterator.java
@@ -39,11 +39,13 @@ class BlockCachesIterator implements Iterator<CachedBlock> {

   @Override
   public boolean hasNext() {
-    if (current.hasNext()) return true;
-    this.index++;
-    if (this.index >= this.bcs.length) return false;
-    this.current = this.bcs[this.index].iterator();
-    return hasNext();
+    while (true) {
+      if (current.hasNext()) return true;
+      this.index++;
+      if (this.index >= this.bcs.length) return false;
+      this.current = this.bcs[this.index].iterator();
+
+    }
   }

   @Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileInputFormat.java
index e90d5c1cb9..f345197419 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileInputFormat.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.mapreduce;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;

 import org.apache.hadoop.conf.Configuration;
@@ -150,9 +151,7 @@ public class HFileInputFormat extends FileInputFormat<NullWritable, Cell> {
     for (FileStatus status : super.listStatus(job)) {
       if (status.isDirectory()) {
         FileSystem fs = status.getPath().getFileSystem(job.getConfiguration());
-        for (FileStatus match : fs.listStatus(status.getPath(), HIDDEN_FILE_FILTER)) {
-          result.add(match);
-        }
+        Collections.addAll(result, fs.listStatus(status.getPath(), HIDDEN_FILE_FILTER));
       } else {
         result.add(status);
       }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
index 77ac1d1ab0..7307d9e88d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java
@@ -495,9 +495,7 @@ public class FSHLog extends AbstractFSWAL<Writer> {
     void offer(final long sequence, final SyncFuture[] syncFutures, final int syncFutureCount) {
       // Set sequence first because the add to the queue will wake the thread if sleeping.
       this.sequence = sequence;
-      for (int i = 0; i < syncFutureCount; ++i) {
-        this.syncFutures.add(syncFutures[i]);
-      }
+      this.syncFutures.addAll(Arrays.asList(syncFutures).subList(0, syncFutureCount));
     }

     /**
-- 
2.13.0
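Reviewer note (everything below the "-- " version trailer sits outside the patch and is dropped by git am): the hunks above come down to three idioms — Collections.addAll(collection, array) in place of an element-by-element copy loop, addAll over Arrays.asList(array).subList(0, n) to bulk-add only the first n array slots (FSHLog.offer), and a plain loop in place of self-recursion in BlockCachesIterator.hasNext(). The sketch below is a self-contained illustration of those idioms, not HBase code; the names (BulkAddSketch, ChainedIterator, src, parts) are invented for the example. Per its Javadoc, Collections.addAll behaves identically to c.addAll(Arrays.asList(elements)) but is likely to run faster for arrays, which is what the "manual array to collection copy" inspection named in the subject line recommends.

// Standalone sketch for review purposes only; all names here are invented.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;

public class BulkAddSketch {

  public static void main(String[] args) {
    String[] src = {"a", "b", "c", "d"};

    // 1. Collections.addAll replaces the manual copy loop that the patch removes.
    List<String> manual = new ArrayList<>();
    for (String s : src) {      // old pattern
      manual.add(s);
    }
    List<String> bulk = new ArrayList<>();
    Collections.addAll(bulk, src);            // new pattern
    System.out.println(manual.equals(bulk));  // true

    // 2. Bulk-adding only the first n slots of an array, as FSHLog.offer now does.
    int n = 2;
    List<String> prefix = new ArrayList<>();
    prefix.addAll(Arrays.asList(src).subList(0, n));
    System.out.println(prefix);               // [a, b]

    // 3. Chaining several sources with a loop instead of self-recursion,
    //    mirroring the reworked BlockCachesIterator.hasNext().
    List<List<String>> parts = Arrays.asList(
        Arrays.asList("x", "y"), Collections.<String>emptyList(), Arrays.asList("z"));
    Iterator<String> it = new ChainedIterator<>(parts);
    while (it.hasNext()) {
      System.out.print(it.next() + " ");      // x y z
    }
    System.out.println();
  }

  /** Minimal chained iterator over a list of sources (illustrative only). */
  static class ChainedIterator<T> implements Iterator<T> {
    private final List<? extends Iterable<T>> sources;
    private Iterator<T> current;
    private int index = 0;

    ChainedIterator(List<? extends Iterable<T>> sources) {
      this.sources = sources;
      this.current = sources.isEmpty()
          ? Collections.<T>emptyIterator() : sources.get(0).iterator();
    }

    @Override
    public boolean hasNext() {
      // Loop, not recursion: stack depth stays constant even if many sources are empty.
      while (true) {
        if (current.hasNext()) return true;
        index++;
        if (index >= sources.size()) return false;
        current = sources.get(index).iterator();
      }
    }

    @Override
    public T next() {
      if (!hasNext()) throw new NoSuchElementException();
      return current.next();
    }
  }
}

The loop form of hasNext() keeps the stack depth constant even when many consecutive sources are empty, whereas the old self-recursive form added one stack frame per empty source.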