diff --git common/pom.xml common/pom.xml index 91d9ce0055..b1afcbc17c 100644 --- common/pom.xml +++ common/pom.xml @@ -12,6 +12,7 @@ See the License for the specific language governing permissions and limitations under the License. --> + diff --git common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java index ff9ad60583..dd79d2c3ff 100644 --- common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java +++ common/src/java/org/apache/hadoop/hive/ant/GenHiveTemplate.java @@ -124,7 +124,7 @@ private String normalize(String description) { int prev = 0; StringBuilder builder = new StringBuilder(description.length() << 1); for (;index > 0; index = description.indexOf('\n', prev = index + 1)) { - builder.append("\n ").append(description.substring(prev, index)); + builder.append("\n ").append(description, prev, index); } if (prev < description.length()) { builder.append("\n ").append(description.substring(prev)); diff --git common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java index d98632ef87..ca925e35ea 100644 --- common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java +++ common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java @@ -18,17 +18,6 @@ package org.apache.hadoop.hive.common; -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.LinkedList; -import java.util.List; - import org.apache.commons.compress.archivers.ArchiveException; import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -42,6 +31,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.LinkedList; +import java.util.List; + /** * This class contains methods used for the purposes of compression, this class * should not be accessed from code run in Hadoop. 
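Editorial note: the GenHiveTemplate hunk above swaps description.substring(prev, index) for the three-argument StringBuilder.append(CharSequence, int, int), which copies the character range directly instead of allocating an intermediate String. A minimal self-contained sketch of the same idea; the indentText helper and its indentation width are illustrative, not part of the patch.

```java
// Sketch: indent every line of a block of text without substring allocations.
// "indentText" is a hypothetical helper, not code from this patch.
public final class AppendRangeExample {
  static String indentText(String description) {
    StringBuilder builder = new StringBuilder(description.length() << 1);
    int prev = 0;
    for (int index = description.indexOf('\n'); index >= 0;
         index = description.indexOf('\n', prev = index + 1)) {
      // append(CharSequence, int, int) copies the range directly, no temporary String
      builder.append("\n    ").append(description, prev, index);
    }
    if (prev < description.length()) {
      builder.append("\n    ").append(description.substring(prev));
    }
    return builder.toString();
  }

  public static void main(String[] args) {
    System.out.println(indentText("first line\nsecond line\nthird"));
  }
}
```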
@@ -66,16 +66,13 @@ public static void tar(String parentDir, String[] inputFiles, String outputFile) TarArchiveOutputStream tOut = new TarArchiveOutputStream( new GzipCompressorOutputStream(new BufferedOutputStream(out))); - for (int i = 0; i < inputFiles.length; i++) { - File f = new File(parentDir, inputFiles[i]); + for (String inputFile : inputFiles) { + File f = new File(parentDir, inputFile); TarArchiveEntry tarEntry = new TarArchiveEntry(f, f.getName()); tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); tOut.putArchiveEntry(tarEntry); - FileInputStream input = new FileInputStream(f); - try { + try (FileInputStream input = new FileInputStream(f)) { IOUtils.copy(input, tOut); // copy with 8K buffer, not close - } finally { - input.close(); } tOut.closeArchiveEntry(); } @@ -91,14 +88,11 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) ZipOutputStream output = null; try { output = new ZipOutputStream(new FileOutputStream(new File(parentDir, outputFile))); - for (int i = 0; i < inputFiles.length; i++) { - File f = new File(parentDir, inputFiles[i]); - FileInputStream input = new FileInputStream(f); - output.putNextEntry(new ZipEntry(inputFiles[i])); - try { + for (String inputFile : inputFiles) { + File f = new File(parentDir, inputFile); + try (FileInputStream input = new FileInputStream(f)) { + output.putNextEntry(new ZipEntry(inputFile)); IOUtils.copy(input, output); - } finally { - input.close(); } } } finally { @@ -145,7 +139,7 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) File inputFile = new File(inputFileName); File outputDir = new File(outputDirName); - final List untaredFiles = new LinkedList(); + final List untaredFiles = new LinkedList<>(); final InputStream is; if (inputFileName.endsWith(".gz")) { @@ -156,7 +150,7 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile) final TarArchiveInputStream debInputStream = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is); - TarArchiveEntry entry = null; + TarArchiveEntry entry; while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) { final File outputFile = new File(outputDir, entry.getName()); if (!outputFile.toPath().toAbsolutePath().normalize() diff --git common/src/java/org/apache/hadoop/hive/common/CopyOnFirstWriteProperties.java common/src/java/org/apache/hadoop/hive/common/CopyOnFirstWriteProperties.java index 60d9e679b4..c6543de832 100644 --- common/src/java/org/apache/hadoop/hive/common/CopyOnFirstWriteProperties.java +++ common/src/java/org/apache/hadoop/hive/common/CopyOnFirstWriteProperties.java @@ -49,8 +49,8 @@ private Properties interned; - private static Interner INTERNER = Interners.newWeakInterner(); - private static Field defaultsField; + private static final Interner INTERNER = Interners.newWeakInterner(); + private static final Field defaultsField; static { try { defaultsField = Properties.class.getDeclaredField("defaults"); diff --git common/src/java/org/apache/hadoop/hive/common/FileUtils.java common/src/java/org/apache/hadoop/hive/common/FileUtils.java index cd7a7e6eaa..1524d84f65 100644 --- common/src/java/org/apache/hadoop/hive/common/FileUtils.java +++ common/src/java/org/apache/hadoop/hive/common/FileUtils.java @@ -18,28 +18,7 @@ package org.apache.hadoop.hive.common; -import java.io.EOFException; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import 
java.net.URISyntaxException; -import java.nio.ByteBuffer; -import java.security.AccessControlException; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.BitSet; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Random; -import java.util.Set; - import com.google.common.annotations.VisibleForTesting; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileStatus; @@ -52,9 +31,6 @@ import org.apache.hadoop.fs.Trash; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.conf.HiveConfUtil; -import org.apache.hadoop.hive.io.HdfsUtils; import org.apache.hadoop.hive.shims.HadoopShims; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.shims.Utils; @@ -64,6 +40,26 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.EOFException; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.ByteBuffer; +import java.security.AccessControlException; +import java.security.PrivilegedExceptionAction; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.BitSet; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Random; +import java.util.Set; + /** * Collection of file manipulation utilities common across Hive. */ @@ -73,20 +69,14 @@ public static final int MAX_IO_ERROR_RETRY = 5; public static final int IO_ERROR_SLEEP_TIME = 100; - public static final PathFilter HIDDEN_FILES_PATH_FILTER = new PathFilter() { - @Override - public boolean accept(Path p) { - String name = p.getName(); - return !name.startsWith("_") && !name.startsWith("."); - } + public static final PathFilter HIDDEN_FILES_PATH_FILTER = p -> { + String name = p.getName(); + return !name.startsWith("_") && !name.startsWith("."); }; - public static final PathFilter STAGING_DIR_PATH_FILTER = new PathFilter() { - @Override - public boolean accept(Path p) { - String name = p.getName(); - return !name.startsWith("."); - } + public static final PathFilter STAGING_DIR_PATH_FILTER = p -> { + String name = p.getName(); + return !name.startsWith("."); }; /** @@ -230,7 +220,7 @@ public static String makeListBucketingDirName(List lbCols, List // the partition with a hive version that now escapes the special char using // the list below, then the drop partition fails to work. 
- static BitSet charToEscape = new BitSet(128); + static final BitSet charToEscape = new BitSet(128); static { for (char c = 0; c < ' '; c++) { charToEscape.set(c); @@ -254,7 +244,7 @@ public static String makeListBucketingDirName(List lbCols, List } static boolean needsEscaping(char c) { - return c >= 0 && c < charToEscape.size() && charToEscape.get(c); + return c < charToEscape.size() && charToEscape.get(c); } public static String escapePathName(String path) { @@ -300,7 +290,7 @@ public static String unescapePathName(String path) { for (int i = 0; i < path.length(); i++) { char c = path.charAt(i); if (c == '%' && i + 2 < path.length()) { - int code = -1; + int code; try { code = Integer.parseInt(path.substring(i + 1, i + 3), 16); } catch (Exception e) { @@ -366,7 +356,7 @@ public static FileStatus getPathOrParentThatExists(FileSystem fs, Path path) thr public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat, final FsAction action, final String user) - throws IOException, AccessControlException, InterruptedException, Exception { + throws Exception { checkFileAccessWithImpersonation(fs, stat, action, user, null); } @@ -408,14 +398,11 @@ public static void checkFileAccessWithImpersonation(final FileSystem fs, UserGroupInformation proxyUser = UserGroupInformation.createProxyUser( user, UserGroupInformation.getLoginUser()); try { - proxyUser.doAs(new PrivilegedExceptionAction() { - @Override - public Object run() throws Exception { - FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf()); - ShimLoader.getHadoopShims().checkFileAccess(fsAsUser, stat, action); - addChildren(fsAsUser, stat.getPath(), children); - return null; - } + proxyUser.doAs((PrivilegedExceptionAction) () -> { + FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf()); + ShimLoader.getHadoopShims().checkFileAccess(fsAsUser, stat, action); + addChildren(fsAsUser, stat.getPath(), children); + return null; }); } finally { FileSystem.closeAllForUGI(proxyUser); @@ -464,7 +451,7 @@ public static boolean isActionPermittedForFileHierarchy(FileSystem fs, FileStatu List subDirsToCheck = null; if (isDir && recurse) { - subDirsToCheck = new ArrayList(); + subDirsToCheck = new ArrayList<>(); } try { @@ -516,7 +503,7 @@ public static boolean isLocalFile(HiveConf conf, URI fileUri) { try { // do best effort to determine if this is a local file FileSystem fsForFile = FileSystem.get(fileUri, conf); - return LocalFileSystem.class.isInstance(fsForFile); + return fsForFile instanceof LocalFileSystem; } catch (IOException e) { LOG.warn("Unable to get FileSystem for " + fileUri, e); } @@ -533,12 +520,9 @@ public static boolean isOwnerOfFileHierarchy(final FileSystem fs, UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(userName, UserGroupInformation.getLoginUser()); try { - boolean isOwner = proxyUser.doAs(new PrivilegedExceptionAction() { - @Override - public Boolean run() throws Exception { - FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf()); - return checkIsOwnerOfFileHierarchy(fsAsUser, fileStatus, userName, recurse); - } + boolean isOwner = proxyUser.doAs((PrivilegedExceptionAction) () -> { + FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf()); + return checkIsOwnerOfFileHierarchy(fsAsUser, fileStatus, userName, recurse); }); return isOwner; } finally { @@ -638,7 +622,7 @@ static boolean copy(FileSystem srcFS, Path src, public static boolean distCp(FileSystem srcFS, List srcPaths, Path dst, boolean deleteSource, String doAsUser, HiveConf 
conf, HadoopShims shims) throws IOException { - boolean copied = false; + boolean copied; if (doAsUser == null){ copied = shims.runDistCp(srcPaths, dst, conf); } else { @@ -668,7 +652,7 @@ public static boolean distCp(FileSystem srcFS, List srcPaths, Path dst, public static boolean moveToTrash(FileSystem fs, Path f, Configuration conf, boolean purge) throws IOException { LOG.debug("deleting " + f); - boolean result = false; + boolean result; try { if(purge) { LOG.debug("purge is set to true. Not moving to Trash " + f); @@ -999,7 +983,7 @@ public static URI getURI(String path) throws URISyntaxException { * @return the list of the file names in the format of URI formats. */ public static Set getJarFilesByPath(String pathString, Configuration conf) { - Set result = new HashSet(); + Set result = new HashSet<>(); if (pathString == null || org.apache.commons.lang.StringUtils.isBlank(pathString)) { return result; } @@ -1038,7 +1022,7 @@ public static URI getURI(String path) throws URISyntaxException { * @throws EOFException the length bytes cannot be read. The buffer position is not modified. */ public static void readFully(InputStream stream, int length, ByteBuffer bb) throws IOException { - byte[] b = null; + byte[] b; int offset = 0; if (bb.hasArray()) { b = bb.array(); diff --git common/src/java/org/apache/hadoop/hive/common/GcTimeMonitor.java common/src/java/org/apache/hadoop/hive/common/GcTimeMonitor.java index edba6f9ad6..bac6326386 100644 --- common/src/java/org/apache/hadoop/hive/common/GcTimeMonitor.java +++ common/src/java/org/apache/hadoop/hive/common/GcTimeMonitor.java @@ -44,7 +44,9 @@ // Ring buffers containing GC timings and timestamps when timings were taken private final TsAndData[] gcDataBuf; - private int bufSize, startIdx, endIdx; + private final int bufSize; + private int startIdx; + private int endIdx; private long startTimeNanos; private final GcData curData = new GcData(); @@ -234,18 +236,12 @@ void setValues(long tsNanos, long gcPauseNanos) { * This main function just leaks memory. Running this class will quickly * result in a "GC hell" and subsequent alerts from the GcTimeMonitor. */ - public static void main(String []args) throws Exception { - new GcTimeMonitor(20 * 1000, 500, 20, - new GcTimeMonitor.GcTimeAlertHandler() { - @Override - public void alert(GcData gcData) { - System.err.println( - "GcTimeMonitor alert. Current GC time percentage = " + - gcData.getGcTimePercentage() + - ", total run time = " + (gcData.getGcMonitorRunTimeMs() / 1000) + " sec" + - ", total GC time = " + (gcData.getAccumulatedGcTimeMs() / 1000) + " sec"); - } - }).start(); + @SuppressWarnings("InfiniteLoopStatement") public static void main(String []args) throws Exception { + new GcTimeMonitor(20 * 1000, 500, 20, gcData -> System.err.println( + "GcTimeMonitor alert. 
Current GC time percentage = " + + gcData.getGcTimePercentage() + + ", total run time = " + (gcData.getGcMonitorRunTimeMs() / 1000) + " sec" + + ", total GC time = " + (gcData.getAccumulatedGcTimeMs() / 1000) + " sec")).start(); List list = Lists.newArrayList(); for (int i = 0; ; i++) { diff --git common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java index 42286be9d8..4649957fa5 100644 --- common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java +++ common/src/java/org/apache/hadoop/hive/common/HeapMemoryMonitor.java @@ -18,6 +18,10 @@ package org.apache.hadoop.hive.common; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.management.NotificationEmitter; import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; import java.lang.management.MemoryNotificationInfo; @@ -27,11 +31,6 @@ import java.util.ArrayList; import java.util.List; -import javax.management.NotificationEmitter; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * Class that monitors memory usage and notifies the listeners when a certain of threshold of memory is used * after GC (collection usage). @@ -43,7 +42,7 @@ private static final MemoryPoolMXBean tenuredGenPool = getTenuredGenPool(); private final double threshold; - private List listeners = new ArrayList<>(); + private final List listeners = new ArrayList<>(); public interface Listener { void memoryUsageAboveThreshold(long usedMemory, long maxMemory); diff --git common/src/java/org/apache/hadoop/hive/common/HiveInterruptUtils.java common/src/java/org/apache/hadoop/hive/common/HiveInterruptUtils.java index 1ccc1189ed..a48961307b 100644 --- common/src/java/org/apache/hadoop/hive/common/HiveInterruptUtils.java +++ common/src/java/org/apache/hadoop/hive/common/HiveInterruptUtils.java @@ -26,7 +26,7 @@ /** * A list of currently running comments that needs cleanup when the command is canceled */ - private static List interruptCallbacks = new ArrayList(); + private static final List interruptCallbacks = new ArrayList<>(); public static HiveInterruptCallback add(HiveInterruptCallback command) { synchronized (interruptCallbacks) { @@ -47,7 +47,7 @@ public static HiveInterruptCallback remove(HiveInterruptCallback command) { */ public static void interrupt() { synchronized (interruptCallbacks) { - for (HiveInterruptCallback resource : new ArrayList(interruptCallbacks)) { + for (HiveInterruptCallback resource : new ArrayList<>(interruptCallbacks)) { resource.interrupt(); } } diff --git common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java index 09343e5616..38de47ab91 100644 --- common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java +++ common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java @@ -17,11 +17,7 @@ */ package org.apache.hadoop.hive.common; -import java.io.IOException; -import java.util.ArrayList; -import java.util.LinkedList; -import java.util.List; - +import com.google.common.collect.Lists; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -31,7 +27,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Lists; +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; /** * HiveStatsUtils. 
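Editorial note: several hunks above (GcTimeMonitor's alert handler, the PathFilter and PrivilegedExceptionAction callbacks in FileUtils) replace anonymous single-method classes with lambdas. A standalone sketch of that conversion, using an illustrative AlertHandler interface rather than the Hive one:

```java
// Sketch of the anonymous-class-to-lambda conversion; names are illustrative, not Hive APIs.
public final class LambdaConversionExample {
  interface AlertHandler {            // single abstract method => lambda-friendly
    void alert(long gcTimePercentage);
  }

  static void register(AlertHandler handler) {
    handler.alert(42);
  }

  public static void main(String[] args) {
    // Before: anonymous inner class
    register(new AlertHandler() {
      @Override
      public void alert(long gcTimePercentage) {
        System.err.println("GC time percentage = " + gcTimePercentage);
      }
    });
    // After: a lambda with identical behaviour and less boilerplate
    register(pct -> System.err.println("GC time percentage = " + pct));
  }
}
```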
@@ -70,7 +69,7 @@ // if level is <0, the return all files/directories under the specified path if (level < 0) { - List result = new ArrayList(); + List result = new ArrayList<>(); try { FileStatus fileStatus = fs.getFileStatus(path); FileUtils.listStatusRecursively(fs, fileStatus, filter, result); @@ -94,7 +93,7 @@ return Lists.newArrayList(fs.globStatus(pathPattern, filter)); } LinkedList queue = new LinkedList<>(); - List results = new ArrayList(); + List results = new ArrayList<>(); for (FileStatus status : fs.globStatus(pathPattern)) { if (filter.accept(status.getPath())) { results.add(status); diff --git common/src/java/org/apache/hadoop/hive/common/JavaUtils.java common/src/java/org/apache/hadoop/hive/common/JavaUtils.java index c011cd1626..c317406c79 100644 --- common/src/java/org/apache/hadoop/hive/common/JavaUtils.java +++ common/src/java/org/apache/hadoop/hive/common/JavaUtils.java @@ -18,6 +18,9 @@ package org.apache.hadoop.hive.common; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; @@ -28,10 +31,6 @@ import java.util.Arrays; import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * Collection of Java class loading/reflection related utilities common across * Hive. @@ -116,8 +115,7 @@ public static void closeClassLoader(ClassLoader loader) throws IOException { } else if (SUN_MISC_UTIL_RELEASE != null && loader instanceof URLClassLoader) { PrintStream outputStream = System.out; ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - PrintStream newOutputStream = new PrintStream(byteArrayOutputStream); - try { + try (PrintStream newOutputStream = new PrintStream(byteArrayOutputStream)) { // SUN_MISC_UTIL_RELEASE.invoke prints to System.out // So we're changing the outputstream for that call, // and setting it back to original System.out when we're done @@ -127,15 +125,13 @@ public static void closeClassLoader(ClassLoader loader) throws IOException { LOG.debug(output); } catch (InvocationTargetException e) { if (e.getTargetException() instanceof IOException) { - throw (IOException)e.getTargetException(); + throw (IOException) e.getTargetException(); } throw new IOException(e.getTargetException()); } catch (Exception e) { throw new IOException(e); - } - finally { + } finally { System.setOut(outputStream); - newOutputStream.close(); } } } diff --git common/src/java/org/apache/hadoop/hive/common/JvmMetrics.java common/src/java/org/apache/hadoop/hive/common/JvmMetrics.java index b758abe0c6..d29db1771d 100644 --- common/src/java/org/apache/hadoop/hive/common/JvmMetrics.java +++ common/src/java/org/apache/hadoop/hive/common/JvmMetrics.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hive.common; -import static org.apache.hadoop.hive.common.JvmMetricsInfo.*; - import org.apache.hadoop.log.metrics.EventCounter; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsInfo; @@ -38,6 +36,29 @@ import java.util.List; import java.util.concurrent.ConcurrentHashMap; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.GcCount; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.GcNumInfoThresholdExceeded; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.GcNumWarnThresholdExceeded; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.GcTimeMillis; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.GcTotalExtraSleepTime; +import static 
org.apache.hadoop.hive.common.JvmMetricsInfo.JvmMetrics; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.LogError; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.LogFatal; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.LogInfo; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.LogWarn; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemHeapCommittedM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemHeapMaxM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemHeapUsedM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemMaxM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemNonHeapCommittedM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemNonHeapMaxM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.MemNonHeapUsedM; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.ThreadsBlocked; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.ThreadsNew; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.ThreadsRunnable; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.ThreadsTerminated; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.ThreadsTimedWaiting; +import static org.apache.hadoop.hive.common.JvmMetricsInfo.ThreadsWaiting; import static org.apache.hadoop.metrics2.impl.MsInfo.ProcessName; import static org.apache.hadoop.metrics2.impl.MsInfo.SessionId; @@ -67,8 +88,7 @@ synchronized JvmMetrics init(String processName, String sessionId) { final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean(); final String processName, sessionId; private JvmPauseMonitor pauseMonitor = null; - final ConcurrentHashMap gcInfoCache = - new ConcurrentHashMap(); + final ConcurrentHashMap gcInfoCache = new ConcurrentHashMap<>(); JvmMetrics(String processName, String sessionId) { this.processName = processName; diff --git common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java index 76dca0493f..0e2698537f 100644 --- common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java +++ common/src/java/org/apache/hadoop/hive/common/JvmMetricsInfo.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.common; import com.google.common.base.MoreObjects; -import com.google.common.base.Objects; - import org.apache.hadoop.metrics2.MetricsInfo; /** diff --git common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java index 3c988da310..cf6e9ca9bb 100644 --- common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java +++ common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java @@ -23,7 +23,6 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.metrics.common.Metrics; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; @@ -221,7 +220,7 @@ private void incrementMetricsCounter(String name, long count) { * with a 1GB heap will very quickly go into "GC hell" and result in * log messages about the GC pauses. 
*/ - public static void main(String []args) throws Exception { + @SuppressWarnings("InfiniteLoopStatement") public static void main(String []args) throws Exception { new JvmPauseMonitor(new Configuration()).start(); List list = Lists.newArrayList(); int i = 0; diff --git common/src/java/org/apache/hadoop/hive/common/LogUtils.java common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 874a3e1274..bb23260779 100644 --- common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -18,20 +18,15 @@ package org.apache.hadoop.hive.common; -import java.io.File; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.net.URL; -import java.util.Map; - +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Appender; +import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.FileAppender; import org.apache.logging.log4j.core.appender.RollingFileAppender; -import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender; import org.apache.logging.log4j.core.appender.routing.RoutingAppender; import org.apache.logging.log4j.core.config.Configurator; @@ -42,7 +37,11 @@ import org.slf4j.LoggerFactory; import org.slf4j.MDC; -import com.google.common.annotations.VisibleForTesting; +import java.io.File; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.net.URL; +import java.util.Map; /** * Utilities common to logging operations. @@ -191,11 +190,11 @@ private static void logConfigLocation(HiveConf conf) throws LogInitializationExc + conf.getHiveDefaultLocation().getPath()); } // Look for hive-site.xml on the CLASSPATH and log its location if found. - if (conf.getHiveSiteLocation() == null) { + if (HiveConf.getHiveSiteLocation() == null) { l4j.warn("hive-site.xml not found on CLASSPATH"); } else { l4j.debug("Using hive-site.xml found on CLASSPATH at " - + conf.getHiveSiteLocation().getPath()); + + HiveConf.getHiveSiteLocation().getPath()); } } @@ -208,7 +207,7 @@ private static void logConfigLocation(HiveConf conf) throws LogInitializationExc */ public static String maskIfPassword(String key, String value) { if (key!=null && value!=null) { - if (key.toLowerCase().indexOf(KEY_TO_MASK_WITH) != -1) { + if (key.toLowerCase().contains(KEY_TO_MASK_WITH)) { return MASKED_VALUE; } } diff --git common/src/java/org/apache/hadoop/hive/common/ObjectPair.java common/src/java/org/apache/hadoop/hive/common/ObjectPair.java index 50b5b3f3b4..f2e41e535a 100644 --- common/src/java/org/apache/hadoop/hive/common/ObjectPair.java +++ common/src/java/org/apache/hadoop/hive/common/ObjectPair.java @@ -31,7 +31,7 @@ public ObjectPair() {} * the method does, so the code becomes less ugly. 
*/ public static ObjectPair create(T1 f, T2 s) { - return new ObjectPair(f, s); + return new ObjectPair<>(f, s); } public ObjectPair(F first, S second) { diff --git common/src/java/org/apache/hadoop/hive/common/ServerUtils.java common/src/java/org/apache/hadoop/hive/common/ServerUtils.java index d7f4b146ed..1f1ecc933d 100644 --- common/src/java/org/apache/hadoop/hive/common/ServerUtils.java +++ common/src/java/org/apache/hadoop/hive/common/ServerUtils.java @@ -18,17 +18,17 @@ package org.apache.hadoop.hive.common; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.conf.HiveConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.net.InetAddress; import java.net.ServerSocket; import java.net.UnknownHostException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.conf.HiveConf; - /** * ServerUtils (specific to HiveServer version 1) */ diff --git common/src/java/org/apache/hadoop/hive/common/StringInternUtils.java common/src/java/org/apache/hadoop/hive/common/StringInternUtils.java index 92d37e812a..559c5d6738 100644 --- common/src/java/org/apache/hadoop/hive/common/StringInternUtils.java +++ common/src/java/org/apache/hadoop/hive/common/StringInternUtils.java @@ -34,9 +34,14 @@ // best if we could tell URI constructor to intern these strings right away. // Without this option, we can only use reflection to "fix" strings in these // fields after a URI has been created. - private static Class uriClass = URI.class; - private static Field stringField, schemeField, authorityField, hostField, pathField, - fragmentField, schemeSpecificPartField; + private static final Class uriClass = URI.class; + private static final Field stringField; + private static final Field schemeField; + private static final Field authorityField; + private static final Field hostField; + private static final Field pathField; + private static final Field fragmentField; + private static final Field schemeSpecificPartField; static { try { diff --git common/src/java/org/apache/hadoop/hive/common/UgiFactory.java common/src/java/org/apache/hadoop/hive/common/UgiFactory.java index 5b1ce60345..114c3a3482 100644 --- common/src/java/org/apache/hadoop/hive/common/UgiFactory.java +++ common/src/java/org/apache/hadoop/hive/common/UgiFactory.java @@ -14,9 +14,10 @@ package org.apache.hadoop.hive.common; -import java.io.IOException; import org.apache.hadoop.security.UserGroupInformation; +import java.io.IOException; + public interface UgiFactory { UserGroupInformation createUgi() throws IOException; } diff --git common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java index c976285f47..327d8941db 100644 --- common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java +++ common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java @@ -17,16 +17,6 @@ */ package org.apache.hadoop.hive.common.auth; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import javax.net.ssl.SSLParameters; -import javax.net.ssl.SSLServerSocket; -import javax.net.ssl.SSLSocket; - import org.apache.thrift.transport.TSSLTransportFactory; import org.apache.thrift.transport.TServerSocket; import org.apache.thrift.transport.TSocket; @@ -35,6 +25,15 
@@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.net.ssl.SSLParameters; +import javax.net.ssl.SSLServerSocket; +import javax.net.ssl.SSLSocket; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + /** * This class helps in some aspects of authentication. It creates the proper Thrift classes for the * given configuration as well as helps with authenticating requests. @@ -103,12 +102,12 @@ public static TServerSocket getServerSSLSocket(String hiveHost, int portNum, Str TServerSocket thriftServerSocket = TSSLTransportFactory.getServerSocket(portNum, 0, serverAddress.getAddress(), params); if (thriftServerSocket.getServerSocket() instanceof SSLServerSocket) { - List sslVersionBlacklistLocal = new ArrayList(); + List sslVersionBlacklistLocal = new ArrayList<>(); for (String sslVersion : sslVersionBlacklist) { sslVersionBlacklistLocal.add(sslVersion.trim().toLowerCase()); } SSLServerSocket sslServerSocket = (SSLServerSocket) thriftServerSocket.getServerSocket(); - List enabledProtocols = new ArrayList(); + List enabledProtocols = new ArrayList<>(); for (String protocol : sslServerSocket.getEnabledProtocols()) { if (sslVersionBlacklistLocal.contains(protocol.toLowerCase())) { LOG.debug("Disabling SSL Protocol: " + protocol); diff --git common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java index cc2ca6eb5d..361a7de933 100644 --- common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java +++ common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hive.common.cli; -import java.util.Properties; - import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; @@ -28,6 +26,8 @@ import org.apache.commons.cli.ParseException; import org.apache.logging.log4j.Level; +import java.util.Properties; + /** * Reusable code for Hive Cli's. *

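Editorial note: the CompressionUtils.tar/zip and JavaUtils.closeClassLoader hunks earlier in this patch drop manual finally-block closes in favor of try-with-resources. A minimal sketch of that pattern; copyInto, the temp file, and the use of InputStream.transferTo (JDK 9+) in place of IOUtils.copy are hypothetical stand-ins.

```java
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;

public final class TryWithResourcesExample {
  static void copyInto(String fileName, OutputStream out) throws IOException {
    // Previously the stream was closed in a finally block; try-with-resources
    // closes it automatically, even if the copy throws.
    try (FileInputStream input = new FileInputStream(fileName)) {
      input.transferTo(out);   // JDK 9+ stand-in for IOUtils.copy(input, out)
    }
  }

  public static void main(String[] args) throws IOException {
    Path tmp = Files.createTempFile("twr-example", ".txt");
    Files.write(tmp, "hello".getBytes());
    copyInto(tmp.toString(), System.out);
  }
}
```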
diff --git common/src/java/org/apache/hadoop/hive/common/cli/EscapeCRLFHelper.java common/src/java/org/apache/hadoop/hive/common/cli/EscapeCRLFHelper.java index 11afd0d2ed..f194d02c8f 100644 --- common/src/java/org/apache/hadoop/hive/common/cli/EscapeCRLFHelper.java +++ common/src/java/org/apache/hadoop/hive/common/cli/EscapeCRLFHelper.java @@ -51,7 +51,7 @@ public static String escapeCRLF(String line) { if (lastNonCRLFIndex < index) { // Copy an intervening non-CRLF characters up to but not including current 'index'. - sb.append(line.substring(lastNonCRLFIndex, index)); + sb.append(line, lastNonCRLFIndex, index); } lastNonCRLFIndex = ++index; if (ch == CARRIAGE_RETURN) { @@ -71,7 +71,7 @@ public static String escapeCRLF(String line) { if (lastNonCRLFIndex < index) { // Copy an intervening non-CRLF characters up to but not including current 'index'. - sb.append(line.substring(lastNonCRLFIndex, index)); + sb.append(line, lastNonCRLFIndex, index); } return sb.toString(); } diff --git common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java index 62d85605bd..9247ff10a2 100644 --- common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java +++ common/src/java/org/apache/hadoop/hive/common/cli/HiveFileProcessor.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.common.cli; -import java.io.BufferedReader; -import java.io.IOException; - import org.apache.commons.lang.StringUtils; import org.apache.hadoop.io.IOUtils; +import java.io.BufferedReader; +import java.io.IOException; + /** * HiveFileProcessor is used for processing a file consist of Hive executable * statements @@ -72,7 +72,7 @@ protected int processReader(BufferedReader reader) throws IOException { * @return the return code of the execution result */ protected int processLine(String line) { - int lastRet = 0, ret = 0; + int lastRet = 0, ret; String command = ""; for (String oneCmd : line.split(";")) { if (StringUtils.indexOf(oneCmd, "\\") != -1) { diff --git common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java index fe6d558250..4bb522719b 100644 --- common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java +++ common/src/java/org/apache/hadoop/hive/common/cli/IHiveFileProcessor.java @@ -29,5 +29,5 @@ * @param fileName the name of the file * @exception IOException if an I/O error occurs. 
*/ - public int processFile(String fileName) throws IOException; + int processFile(String fileName) throws IOException; } diff --git common/src/java/org/apache/hadoop/hive/common/cli/ShellCmdExecutor.java common/src/java/org/apache/hadoop/hive/common/cli/ShellCmdExecutor.java index e893270024..aa19124703 100644 --- common/src/java/org/apache/hadoop/hive/common/cli/ShellCmdExecutor.java +++ common/src/java/org/apache/hadoop/hive/common/cli/ShellCmdExecutor.java @@ -18,15 +18,15 @@ package org.apache.hadoop.hive.common.cli; +import org.apache.hive.common.util.StreamPrinter; + import java.io.IOException; import java.io.PrintStream; -import org.apache.hive.common.util.StreamPrinter; - public class ShellCmdExecutor { - private String cmd; - private PrintStream out; - private PrintStream err; + private final String cmd; + private final PrintStream out; + private final PrintStream err; public ShellCmdExecutor(String cmd, PrintStream out, PrintStream err) { this.cmd = cmd; diff --git common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java index a7c6e6ebff..11c8a47ae9 100644 --- common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java +++ common/src/java/org/apache/hadoop/hive/common/io/CachingPrintStream.java @@ -28,7 +28,7 @@ // A printStream that stores messages logged to it in a list. public class CachingPrintStream extends PrintStream { - List output = new ArrayList(); + List output = new ArrayList<>(); public CachingPrintStream(OutputStream out, boolean autoFlush, String encoding) throws FileNotFoundException, UnsupportedEncodingException { @@ -49,7 +49,7 @@ public void println(String out) { @Override public void flush() { - output = new ArrayList(); + output = new ArrayList<>(); super.flush(); } diff --git common/src/java/org/apache/hadoop/hive/common/io/SortPrintStream.java common/src/java/org/apache/hadoop/hive/common/io/SortPrintStream.java index bfcc53f713..adc587f775 100644 --- common/src/java/org/apache/hadoop/hive/common/io/SortPrintStream.java +++ common/src/java/org/apache/hadoop/hive/common/io/SortPrintStream.java @@ -25,12 +25,7 @@ public class SortPrintStream extends FetchConverter { - private static final Comparator STR_COMP = new Comparator() { - @Override - public int compare(String o1, String o2) { - return o1.compareTo(o2); - } - }; + private static final Comparator STR_COMP = Comparator.naturalOrder(); protected final MinMaxPriorityQueue outputs = MinMaxPriorityQueue.orderedBy(STR_COMP).create(); diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParser.java index c4e9007991..4dff1bb52b 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParser.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParser.java @@ -18,6 +18,10 @@ package org.apache.hadoop.hive.common.jsonexplain; +import org.json.JSONObject; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.PrintStream; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -27,11 +31,6 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.hadoop.hive.common.jsonexplain.JsonParser; -import org.json.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public abstract class DagJsonParser implements JsonParser { public final Map stages = new LinkedHashMap<>(); protected final Logger LOG; diff --git 
common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParserUtils.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParserUtils.java index 6ca350a2a8..f45667b5a3 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParserUtils.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/DagJsonParserUtils.java @@ -26,8 +26,7 @@ public class DagJsonParserUtils { - public static List OperatorNoStats = Arrays.asList(new String[] { "File Output Operator", - "Reduce Output Operator" }); + public static final List OperatorNoStats = Arrays.asList("File Output Operator", "Reduce Output Operator"); public static String renameReduceOutputOperator(String operatorName, Vertex vertex) { if (operatorName.equals("Reduce Output Operator") && vertex.edgeType != null) { diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java index 180cad74de..6b5146c87d 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/JsonParser.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hive.common.jsonexplain; -import java.io.PrintStream; - import org.json.JSONObject; +import java.io.PrintStream; + /** * JsonParser is the interface for classes that print a JSONObject * into outputStream. */ public interface JsonParser { - public void print(JSONObject inputObject, PrintStream outputStream) throws Exception; + void print(JSONObject inputObject, PrintStream outputStream) throws Exception; } diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java index 9603508dba..2084ad65d3 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/Op.java @@ -18,6 +18,12 @@ package org.apache.hadoop.hive.common.jsonexplain; +import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.hive.common.jsonexplain.Vertex.VertexType; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -27,18 +33,12 @@ import java.util.Map.Entry; import java.util.Set; -import org.apache.hadoop.hive.common.jsonexplain.Vertex.VertexType; -import org.json.JSONArray; -import com.google.common.annotations.VisibleForTesting; -import org.json.JSONException; -import org.json.JSONObject; - public final class Op { public final String name; // tezJsonParser public final DagJsonParser parser; public String operatorId; - public Op parent; + public final Op parent; public final List children; public final Map attrs; // the jsonObject for this operator @@ -52,7 +52,7 @@ public enum OpType { MAPJOIN, MERGEJOIN, RS, OTHERS - }; + } public Op(String name, String id, String outputVertexName, Op parent, List children, Map attrs, JSONObject opObject, Vertex vertex, DagJsonParser tezJsonParser) @@ -176,7 +176,7 @@ else if (parentVertexes.size() == 1) { JSONArray conditionMap = opObject.getJSONArray("condition map:"); for (int index = 0; index < conditionMap.length(); index++) { JSONObject cond = conditionMap.getJSONObject(index); - String k = (String) cond.keys().next(); + String k = cond.keys().next(); JSONObject condObject = new JSONObject((String)cond.get(k)); String type = condObject.getString("type"); String left = 
condObject.getString("left"); @@ -256,7 +256,7 @@ else if (parentVertexes.size() == 1) { JSONArray conditionMap = opObject.getJSONArray("condition map:"); for (int index = 0; index < conditionMap.length(); index++) { JSONObject cond = conditionMap.getJSONObject(index); - String k = (String) cond.keys().next(); + String k = cond.keys().next(); JSONObject condObject = new JSONObject((String)cond.get(k)); String type = condObject.getString("type"); String left = condObject.getString("left"); diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/Printer.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/Printer.java index 274fb09c54..f6cdf25006 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/Printer.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/Printer.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hive.common.jsonexplain; public final class Printer { - public static final String lineSeparator = System.getProperty("line.separator");; + public static final String lineSeparator = System.getProperty("line.separator"); private final StringBuilder builder = new StringBuilder(); public void print(String string) { diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/Stage.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/Stage.java index ec39bd42da..f745cab31a 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/Stage.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/Stage.java @@ -18,6 +18,12 @@ package org.apache.hadoop.hive.common.jsonexplain; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.jsonexplain.Vertex.VertexType; +import org.json.JSONArray; +import org.json.JSONException; +import org.json.JSONObject; + import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; @@ -25,12 +31,6 @@ import java.util.Map; import java.util.TreeMap; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.jsonexplain.Vertex.VertexType; -import org.json.JSONArray; -import org.json.JSONException; -import org.json.JSONObject; - public final class Stage { //external name is used to show at the console String externalName; @@ -228,10 +228,7 @@ private boolean isPrintable(Object val) { || val instanceof Double || val instanceof Path) { return true; } - if (val != null && val.getClass().isPrimitive()) { - return true; - } - return false; + return val != null && val.getClass().isPrimitive(); } public void print(Printer printer, int indentFlag) throws Exception { diff --git common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java index c54e01bc05..2b25d5d541 100644 --- common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java +++ common/src/java/org/apache/hadoop/hive/common/jsonexplain/Vertex.java @@ -18,15 +18,6 @@ package org.apache.hadoop.hive.common.jsonexplain; -import java.io.IOException; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; - import org.apache.hadoop.hive.common.jsonexplain.Op.OpType; import org.codehaus.jackson.JsonParseException; import org.codehaus.jackson.map.JsonMappingException; @@ -36,6 +27,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; 
+import java.util.Map; +import java.util.TreeMap; + public final class Vertex implements Comparable{ public final String name; // the stage that this vertex belongs to @@ -68,14 +66,16 @@ public String tag; protected final Logger LOG = LoggerFactory.getLogger(this.getClass().getName()); - public static enum VertexType { + public enum VertexType { MAP, REDUCE, UNION, UNKNOWN - }; - public VertexType vertexType; + } - public static enum EdgeType { + public final VertexType vertexType; + + public enum EdgeType { BROADCAST, SHUFFLE, MULTICAST, PARTITION_ONLY_SHUFFLE, FORWARD, XPROD_EDGE, UNKNOWN - }; + } + public String edgeType; public Vertex(String name, JSONObject vertexObject, Stage stage, DagJsonParser dagJsonParser) { @@ -117,11 +117,15 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin IOException, Exception { if (vertexObject.length() != 0) { for (String key : JSONObject.getNames(vertexObject)) { - if (key.equals("Map Operator Tree:")) { + switch (key) { + case "Map Operator Tree:": extractOp(vertexObject.getJSONArray(key).getJSONObject(0), null); - } else if (key.equals("Reduce Operator Tree:") || key.equals("Processor Tree:")) { + break; + case "Reduce Operator Tree:": + case "Processor Tree:": extractOp(vertexObject.getJSONObject(key), null); - } else if (key.equals("Join:")) { + break; + case "Join:": // this is the case when we have a map-side SMB join // one input of the join is treated as a dummy vertex JSONArray array = vertexObject.getJSONArray(key); @@ -132,26 +136,33 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin v.dummy = true; mergeJoinDummyVertexs.add(v); } - } else if (key.equals("Merge File Operator")) { + break; + case "Merge File Operator": JSONObject opTree = vertexObject.getJSONObject(key); if (opTree.has("Map Operator Tree:")) { extractOp(opTree.getJSONArray("Map Operator Tree:").getJSONObject(0), null); } else { throw new Exception("Merge File Operator does not have a Map Operator Tree"); } - } else if (key.equals("Execution mode:")) { + break; + case "Execution mode:": executionMode = " " + vertexObject.getString(key); - } else if (key.equals("tagToInput:")) { + break; + case "tagToInput:": JSONObject tagToInput = vertexObject.getJSONObject(key); for (String tag : JSONObject.getNames(tagToInput)) { this.tagToInput.put(tag, (String) tagToInput.get(tag)); } - } else if (key.equals("tag:")) { + break; + case "tag:": this.tag = vertexObject.getString(key); - } else if (key.equals("Local Work:")) { + break; + case "Local Work:": extractOp(vertexObject.getJSONObject(key), null); - } else { + break; + default: LOG.warn("Skip unsupported " + key + " in vertex " + this.name); + break; } } } @@ -159,7 +170,6 @@ public void extractOpTree() throws JSONException, JsonParseException, JsonMappin /** * @param object - * @param isInput * @param parent * @return * @throws JSONException @@ -202,14 +212,18 @@ Op extractOp(JSONObject object, Op parent) throws JSONException, JsonParseExcept + "'s children operator is neither a jsonobject nor a jsonarray"); } } else { - if (attrName.equals("OperatorId:")) { + switch (attrName) { + case "OperatorId:": op.setOperatorId(attrObj.get(attrName).toString()); - } else if (attrName.equals("outputname:")) { + break; + case "outputname:": op.outputVertexName = attrObj.get(attrName).toString(); - } else { + break; + default: if (!attrObj.get(attrName).toString().isEmpty()) { attrs.put(attrName, attrObj.get(attrName).toString()); } + break; } } } @@ -225,7 +239,7 @@ Op 
extractOp(JSONObject object, Op parent) throws JSONException, JsonParseExcept } public void print(Printer printer, int indentFlag, String type, Vertex callingVertex) - throws JSONException, Exception { + throws Exception { // print vertexname if (parser.printSet.contains(this) && numReduceOp <= 1) { if (type != null) { @@ -272,8 +286,7 @@ public void print(Printer printer, int indentFlag, String type, Vertex callingVe if (vertexType == VertexType.UNION) { // print dependent vertexs indentFlag++; - for (int index = 0; index < this.parentConnections.size(); index++) { - Connection connection = this.parentConnections.get(index); + for (Connection connection : this.parentConnections) { connection.from.print(printer, indentFlag, connection.type, this); } } diff --git common/src/java/org/apache/hadoop/hive/common/log/InPlaceUpdate.java common/src/java/org/apache/hadoop/hive/common/log/InPlaceUpdate.java index 37cc12d303..6249fde9fa 100644 --- common/src/java/org/apache/hadoop/hive/common/log/InPlaceUpdate.java +++ common/src/java/org/apache/hadoop/hive/common/log/InPlaceUpdate.java @@ -17,21 +17,20 @@ */ package org.apache.hadoop.hive.common.log; -import com.google.common.base.Function; -import com.google.common.collect.Lists; -import jline.TerminalFactory; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.fusesource.jansi.Ansi; -import javax.annotation.Nullable; import java.io.PrintStream; import java.io.StringWriter; import java.text.DecimalFormat; import java.util.List; +import java.util.stream.Collectors; import static org.fusesource.jansi.Ansi.ansi; -import static org.fusesource.jansi.internal.CLibrary.*; +import static org.fusesource.jansi.internal.CLibrary.STDERR_FILENO; +import static org.fusesource.jansi.internal.CLibrary.STDOUT_FILENO; +import static org.fusesource.jansi.internal.CLibrary.isatty; /** * Renders information from ProgressMonitor to the stream provided. @@ -52,7 +51,7 @@ /* Pretty print the values */ private final DecimalFormat secondsFormatter = new DecimalFormat("#0.00"); private int lines = 0; - private PrintStream out; + private final PrintStream out; public InPlaceUpdate(PrintStream out) { this.out = out; @@ -158,13 +157,8 @@ public void render(ProgressMonitor monitor) { // Map 1 .......... 
container SUCCEEDED 7 7 0 0 0 0 - List printReady = Lists.transform(monitor.rows(), new Function, String>() { - @Nullable - @Override - public String apply(@Nullable List row) { - return String.format(VERTEX_FORMAT, row.toArray()); - } - }); + List printReady = + monitor.rows().stream().map(row -> String.format(VERTEX_FORMAT, row.toArray())).collect(Collectors.toList()); reprintMultiLine(StringUtils.join(printReady, "\n")); // ------------------------------------------------------------------------------- diff --git common/src/java/org/apache/hadoop/hive/common/log/LogRedirector.java common/src/java/org/apache/hadoop/hive/common/log/LogRedirector.java index 503dbc245f..2bd8c4660b 100644 --- common/src/java/org/apache/hadoop/hive/common/log/LogRedirector.java +++ common/src/java/org/apache/hadoop/hive/common/log/LogRedirector.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; - import java.util.List; /** @@ -60,7 +59,7 @@ public LogRedirector(InputStream in, Logger logger, List errLogs, @Override public void run() { try { - String line = null; + String line; while ((line = in.readLine()) != null) { logger.info(line); if (errLogs != null) { diff --git common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java index d05c7289e5..77ba7eed4a 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/LegacyMetrics.java @@ -24,13 +24,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.lang.management.ManagementFactory; -import java.util.HashMap; - import javax.management.JMException; import javax.management.MBeanServer; import javax.management.MalformedObjectNameException; import javax.management.ObjectName; +import java.lang.management.ManagementFactory; +import java.util.HashMap; /** * This class may eventually get superseded by org.apache.hadoop.hive.common.metrics2.Metrics. 
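Editorial note: the InPlaceUpdate hunk above replaces Guava's Lists.transform with an anonymous Function by a java.util.stream pipeline. A rough equivalent with made-up row data; note that Lists.transform returns a lazy view, while collect(Collectors.toList()) materializes the list eagerly.

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public final class StreamTransformExample {
  public static void main(String[] args) {
    // Hypothetical monitor rows, standing in for ProgressMonitor.rows().
    List<List<String>> rows = Arrays.asList(
        Arrays.asList("Map 1", "SUCCEEDED", "7"),
        Arrays.asList("Reducer 2", "RUNNING", "3"));

    // Before: Lists.transform(rows, new Function<List<String>, String>() { ... })
    // After: an eager stream pipeline producing the same formatted lines.
    List<String> printReady = rows.stream()
        .map(row -> String.format("%-10s %-10s %3s", row.toArray()))
        .collect(Collectors.toList());

    printReady.forEach(System.out::println);
  }
}
```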
@@ -127,7 +126,7 @@ public void close() { Long num = metrics.incrementCounter(numCounter); Long time = metrics.incrementCounter(timeCounter, endTime - startTime); if (num != null && time != null) { - metrics.set(avgTimeCounter, Double.valueOf(time.doubleValue() / num.doubleValue())); + metrics.set(avgTimeCounter, time.doubleValue() / num.doubleValue()); } } } else { @@ -162,12 +161,7 @@ public void reopen() { } private static final ThreadLocal> threadLocalScopes - = new ThreadLocal>() { - @Override - protected HashMap initialValue() { - return new HashMap(); - } - }; + = ThreadLocal.withInitial(HashMap::new); public LegacyMetrics(HiveConf conf) throws Exception { MBeanServer mbs = ManagementFactory.getPlatformMBeanServer(); @@ -175,14 +169,14 @@ public LegacyMetrics(HiveConf conf) throws Exception { } public Long incrementCounter(String name) { - return incrementCounter(name,Long.valueOf(1)); + return incrementCounter(name, 1L); } public Long incrementCounter(String name, long increment) { Long value = null; synchronized(metrics) { if (!metrics.hasKey(name)) { - value = Long.valueOf(increment); + value = increment; set(name, value); } else { try { @@ -198,14 +192,14 @@ public Long incrementCounter(String name, long increment) { } public Long decrementCounter(String name) { - return decrementCounter(name, Long.valueOf(1)); + return decrementCounter(name, 1L); } public Long decrementCounter(String name, long decrement) { Long value = null; synchronized(metrics) { if (!metrics.hasKey(name)) { - value = Long.valueOf(decrement); + value = decrement; set(name, -value); } else { try { diff --git common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java index b8022230db..fcdf279719 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java @@ -29,14 +29,14 @@ /** * Check if we're tracking a certain named key/metric */ - public abstract boolean hasKey(String name); + boolean hasKey(String name); /** * Add a key/metric and its value to track * @param name Name of the key/metric * @param value value associated with the key */ - public abstract void put(String name, Object value); + void put(String name, Object value); /** * @@ -44,7 +44,7 @@ * @return value associated with the key * @throws JMException */ - public abstract Object get(String name) throws JMException; + Object get(String name) throws JMException; /** * Removes all the keys and values from this MetricsMBean. 
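Editorial note: the LegacyMetrics hunk above collapses an anonymous ThreadLocal subclass overriding initialValue() into ThreadLocal.withInitial. A small sketch of the same conversion; the field name and value type here are illustrative, not the Hive ones.

```java
import java.util.HashMap;

public final class ThreadLocalExample {
  // Before:
  //   new ThreadLocal<HashMap<String, Object>>() {
  //     @Override protected HashMap<String, Object> initialValue() { return new HashMap<>(); }
  //   };
  // After: a supplier-based factory with the same per-thread initial value.
  private static final ThreadLocal<HashMap<String, Object>> scopes =
      ThreadLocal.withInitial(HashMap::new);

  public static void main(String[] args) {
    scopes.get().put("key", "value");
    System.out.println(scopes.get());
  }
}
```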
diff --git common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java index de650f5c49..66c6f19b43 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java @@ -17,9 +17,6 @@ */ package org.apache.hadoop.hive.common.metrics; -import java.util.HashMap; -import java.util.Map; - import javax.management.Attribute; import javax.management.AttributeList; import javax.management.AttributeNotFoundException; @@ -32,11 +29,13 @@ import javax.management.MBeanNotificationInfo; import javax.management.MBeanOperationInfo; import javax.management.ReflectionException; +import java.util.HashMap; +import java.util.Map; public class MetricsMBeanImpl implements MetricsMBean { - private final Map metricsMap = new HashMap(); + private final Map metricsMap = new HashMap<>(); private MBeanAttributeInfo[] attributeInfos; private boolean dirtyAttributeInfoCache = true; @@ -154,7 +153,7 @@ public Object get(String name) throws JMException { public void reset() { synchronized(metricsMap) { for (String key : metricsMap.keySet()) { - metricsMap.put(key, Long.valueOf(0)); + metricsMap.put(key, 0L); } } } diff --git common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java index 99d3e57d84..233f61ccaa 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/common/Metrics.java @@ -25,33 +25,33 @@ /** * Deinitializes the Metrics system. */ - public void close() throws Exception; + void close() throws Exception; /** * * @param name starts a scope of a given name. Scopes is stored as thread-local variable. */ - public void startStoredScope(String name); + void startStoredScope(String name); /** * Closes the stored scope of a given name. * Note that this must be called on the same thread as where the scope was started. * @param name */ - public void endStoredScope(String name); + void endStoredScope(String name); /** * Create scope with given name and returns it. * @param name * @return */ - public MetricsScope createScope(String name); + MetricsScope createScope(String name); /** * Close the given scope. * @param scope */ - public void endScope(MetricsScope scope); + void endScope(MetricsScope scope); //Counter-related methods @@ -60,7 +60,7 @@ * @param name * @return */ - public Long incrementCounter(String name); + Long incrementCounter(String name); /** * Increments a counter of the given name by "increment" @@ -68,7 +68,7 @@ * @param increment * @return */ - public Long incrementCounter(String name, long increment); + Long incrementCounter(String name, long increment); /** @@ -76,7 +76,7 @@ * @param name * @return */ - public Long decrementCounter(String name); + Long decrementCounter(String name); /** * Decrements a counter of the given name by "decrement" @@ -84,7 +84,7 @@ * @param decrement * @return */ - public Long decrementCounter(String name, long decrement); + Long decrementCounter(String name, long decrement); /** @@ -92,14 +92,14 @@ * @param name name of gauge * @param variable variable to track. */ - public void addGauge(String name, final MetricsVariable variable); + void addGauge(String name, final MetricsVariable variable); /** * Removed the gauge added by addGauge. 
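The interface edits in MetricsMBean and Metrics only strip modifiers the language already implies: every method declared in an interface is public and abstract whether or not it says so. A compilable sketch of that equivalence, reusing the hasKey signature from the hunk; the implementation body is invented.

// Both declarations below compile to the same method descriptor.
interface BeforeStyle {
    public abstract boolean hasKey(String name);   // redundant modifiers
}

interface AfterStyle {
    boolean hasKey(String name);                   // same semantics, less noise
}

public class InterfaceModifierSketch implements BeforeStyle, AfterStyle {
    @Override
    public boolean hasKey(String name) {
        // Placeholder logic so the sketch runs; the real MBean consults its metrics map.
        return name != null && !name.isEmpty();
    }

    public static void main(String[] args) {
        System.out.println(new InterfaceModifierSketch().hasKey("metric"));
    }
}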
* @param name name of gauge */ - public void removeGauge(String name); + void removeGauge(String name); /** @@ -108,13 +108,12 @@ * @param numerator numerator of the ratio * @param denominator denominator of the ratio */ - public void addRatio(String name, MetricsVariable numerator, - MetricsVariable denominator); + void addRatio(String name, MetricsVariable numerator, MetricsVariable denominator); /** * Mark an event occurance for a meter. Meters measure the rate of an event and track * 1/5/15 minute moving averages * @param name name of the meter */ - public void markMeter(String name); + void markMeter(String name); } diff --git common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java index b8e9a01bbe..5b57b314c2 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsFactory.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.common.metrics.common; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.util.ReflectionUtils; import java.lang.reflect.Constructor; diff --git common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java index 94b91b9cd9..e6b238fe09 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java @@ -22,5 +22,5 @@ * currently active connections. */ public interface MetricsVariable { - public T getValue(); + T getValue(); } \ No newline at end of file diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java index 4f35a6da60..3eca6b8eb1 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java @@ -18,11 +18,9 @@ package org.apache.hadoop.hive.common.metrics.metrics2; -import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.Counter; import com.codahale.metrics.ExponentiallyDecayingReservoir; import com.codahale.metrics.Gauge; -import com.codahale.metrics.JmxReporter; import com.codahale.metrics.Meter; import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; @@ -35,7 +33,6 @@ import com.codahale.metrics.jvm.MemoryUsageGaugeSet; import com.codahale.metrics.jvm.ThreadStatesGaugeSet; import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.joshelser.dropwizard.metrics.hadoop.HadoopMetrics2Reporter; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; @@ -43,32 +40,22 @@ import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.Lists; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.metrics.common.MetricsConstant; import org.apache.hadoop.hive.common.metrics.common.MetricsScope; import org.apache.hadoop.hive.common.metrics.common.MetricsVariable; import 
org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.BufferedWriter; import java.io.Closeable; -import java.io.IOException; -import java.io.OutputStreamWriter; import java.lang.management.ManagementFactory; -import java.net.URI; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.TimerTask; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -88,21 +75,16 @@ private final Lock gaugesLock = new ReentrantLock(); private final Lock metersLock = new ReentrantLock(); - private LoadingCache timers; - private LoadingCache counters; - private LoadingCache meters; - private ConcurrentHashMap gauges; + private final LoadingCache timers; + private final LoadingCache counters; + private final LoadingCache meters; + private final ConcurrentHashMap gauges; - private HiveConf conf; - private final Set reporters = new HashSet(); + private final HiveConf conf; + private final Set reporters = new HashSet<>(); private final ThreadLocal> threadLocalScopes - = new ThreadLocal>() { - @Override - protected HashMap initialValue() { - return new HashMap(); - } - }; + = ThreadLocal.withInitial(HashMap::new); public class CodahaleMetricsScope implements MetricsScope { @@ -183,7 +165,7 @@ public Meter load(String key) { } } ); - gauges = new ConcurrentHashMap(); + gauges = new ConcurrentHashMap<>(); //register JVM metrics registerAll("gc", new GarbageCollectorMetricSet()); @@ -199,10 +181,8 @@ public Meter load(String key) { @Override public void close() throws Exception { - if (reporters != null) { - for (Closeable reporter : reporters) { - reporter.close(); - } + for (Closeable reporter : reporters) { + reporter.close(); } for (Map.Entry metric : metricRegistry.getMetrics().entrySet()) { metricRegistry.remove(metric.getKey()); @@ -285,12 +265,7 @@ public Long decrementCounter(String name, long decrement) { @Override public void addGauge(String name, final MetricsVariable variable) { - Gauge gauge = new Gauge() { - @Override - public Object getValue() { - return variable.getValue(); - } - }; + Gauge gauge = variable::getValue; addGaugeInternal(name, gauge); } @@ -415,7 +390,7 @@ private boolean initCodahaleMetricsReporterClasses() { } for (String reporterClass : reporterClasses) { - Class name = null; + Class name; try { name = conf.getClassByName(reporterClass); } catch (ClassNotFoundException e) { @@ -452,7 +427,7 @@ private boolean initMetricsReporter() { return false; } - MetricsReporting reporter = null; + MetricsReporting reporter; for (String metricsReportingName : metricsReporterNames) { try { reporter = MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase()); @@ -477,10 +452,8 @@ private boolean initMetricsReporter() { default: LOGGER.warn("Unhandled reporter " + reporter + " provided."); } - if (codahaleReporter != null) { - codahaleReporter.start(); - reporters.add(codahaleReporter); - } + codahaleReporter.start(); + reporters.add(codahaleReporter); } return true; } diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java index 88a5ea07ca..f2b678ddcd 100644 --- 
common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.common.metrics.metrics2; import com.codahale.metrics.Reporter; + import java.io.Closeable; public interface CodahaleReporter extends Closeable, Reporter { @@ -25,5 +26,5 @@ /** * Start the reporter. */ - public void start(); + void start(); } diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java index fd7613719f..fe3d4a6143 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/ConsoleMetricsReporter.java @@ -20,11 +20,10 @@ import com.codahale.metrics.ConsoleReporter; import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.Reporter; -import java.io.Closeable; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; +import java.util.concurrent.TimeUnit; + /** * A wrapper around Codahale ConsoleReporter to make it a pluggable/configurable Hive Metrics reporter. diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java index 342cb70a3a..36654cfa79 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JmxMetricsReporter.java @@ -19,11 +19,10 @@ import com.codahale.metrics.JmxReporter; import com.codahale.metrics.MetricRegistry; -import com.codahale.metrics.Reporter; -import java.io.Closeable; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; +import java.util.concurrent.TimeUnit; + /** * A wrapper around Codahale JmxReporter to make it a pluggable/configurable Hive Metrics reporter. 
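CodahaleMetrics.addGauge now builds the Codahale Gauge from a method reference instead of an anonymous class. The sketch below assumes only that Gauge and MetricsVariable are single-abstract-method interfaces exposing getValue(); it defines local stand-ins so it compiles without the Dropwizard jar, and the value 42 is arbitrary.

public class GaugeLambdaSketch {
    // Minimal stand-ins for com.codahale.metrics.Gauge and
    // org.apache.hadoop.hive.common.metrics.common.MetricsVariable, both of which
    // expose a single getValue() method in the patched code.
    interface MetricsVariable<T> { T getValue(); }
    interface Gauge<T> { T getValue(); }

    public static void main(String[] args) {
        MetricsVariable<Integer> openConnections = () -> 42;

        // Before the patch: an anonymous Gauge wrapping the variable.
        Gauge<Integer> oldStyle = new Gauge<Integer>() {
            @Override
            public Integer getValue() {
                return openConnections.getValue();
            }
        };

        // After the patch: the same adapter expressed as a method reference.
        Gauge<Integer> newStyle = openConnections::getValue;

        System.out.println(oldStyle.getValue() + " " + newStyle.getValue());
    }
}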
*/ diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java index 66f298c621..8a1ca10411 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/JsonFileMetricsReporter.java @@ -138,7 +138,7 @@ public void run() { Path tmpFile = null; try { // Dump metrics to string as JSON - String json = null; + String json; try { json = jsonWriter.writeValueAsString(metricRegistry); } catch (JsonProcessingException e) { diff --git common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java index 2d9351d37a..7c1ca66f55 100644 --- common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java +++ common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/Metrics2Reporter.java @@ -19,11 +19,10 @@ import com.codahale.metrics.MetricRegistry; import com.github.joshelser.dropwizard.metrics.hadoop.HadoopMetrics2Reporter; -import java.io.Closeable; -import java.util.concurrent.TimeUnit; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; -import com.codahale.metrics.Reporter; + +import java.util.concurrent.TimeUnit; /** * A wrapper around Codahale HadoopMetrics2Reporter to make it a pluggable/configurable Hive Metrics reporter. diff --git common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java index 194fc1414c..23f10dca49 100644 --- common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java +++ common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.common.type; +import org.apache.hive.common.util.Decimal128FastBuffer; + import java.math.BigDecimal; import java.math.BigInteger; -import java.nio.ByteBuffer; import java.nio.IntBuffer; - -import org.apache.hive.common.util.Decimal128FastBuffer; +import java.util.Objects; /** * This code was based on code from Microsoft's PolyBase. @@ -128,8 +128,6 @@ public static int getIntsPerElement(int precision) { /** Construct a zero. */ public Decimal128() { this.unscaledValue = new UnsignedInt128(); - this.scale = 0; - this.signum = 0; } /** @@ -182,7 +180,6 @@ public Decimal128(UnsignedInt128 unscaledVal, short scale, boolean negative) { this.unscaledValue = new UnsignedInt128(unscaledVal); this.scale = scale; if (unscaledValue.isZero()) { - this.signum = 0; } else { this.signum = negative ? 
(byte) -1 : (byte) 1; } @@ -1100,7 +1097,7 @@ public Decimal128 subtractDestructive(Decimal128 right, short scale) { */ public static void multiply(Decimal128 left, Decimal128 right, Decimal128 result, short scale) { - if (result == left || result == right) { + if (Objects.equals(result, left) || Objects.equals(result, right)) { throw new IllegalArgumentException( "result object cannot be left or right operand"); } @@ -1210,7 +1207,7 @@ public void multiplyDestructive(Decimal128 right, short newScale) { */ public static void divide(Decimal128 left, Decimal128 right, Decimal128 quotient, short scale) { - if (quotient == left || quotient == right) { + if (Objects.equals(quotient, left) || Objects.equals(quotient, right)) { throw new IllegalArgumentException( "result object cannot be left or right operand"); } @@ -1501,7 +1498,7 @@ public UnsignedInt128 getUnscaledValue() { */ @Override public int compareTo(Decimal128 val) { - if (val == this) { + if (Objects.equals(val, this)) { return 0; } @@ -1729,7 +1726,7 @@ private static void checkScaleRange(short scale) { /** * Temporary array used in {@link #getHiveDecimalString} */ - private int [] tmpArray = new int[2]; + private final int [] tmpArray = new int[2]; /** * Returns the string representation of this value. It discards the trailing zeros diff --git common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java index 7fb4e9f098..591397ff6f 100644 --- common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java +++ common/src/java/org/apache/hadoop/hive/common/type/HiveIntervalYearMonth.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hive.common.type; +import org.apache.hive.common.util.DateUtils; + import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.hive.common.util.DateUtils; - public class HiveIntervalYearMonth implements Comparable { // years/months represented in months @@ -116,7 +116,7 @@ public String toString() { } public static HiveIntervalYearMonth valueOf(String strVal) { - HiveIntervalYearMonth result = null; + HiveIntervalYearMonth result; if (strVal == null) { throw new IllegalArgumentException("Interval year-month string was null"); } @@ -152,10 +152,6 @@ public static HiveIntervalYearMonth valueOf(String strVal) { private final static String PARSE_PATTERN = "([+|-])?(\\d+)-(\\d+)"; - private static final ThreadLocal PATTERN_MATCHER = new ThreadLocal() { - @Override - protected Matcher initialValue() { - return Pattern.compile(PARSE_PATTERN).matcher(""); - } - }; + private static final ThreadLocal PATTERN_MATCHER = + ThreadLocal.withInitial(() -> Pattern.compile(PARSE_PATTERN).matcher("")); } diff --git common/src/java/org/apache/hadoop/hive/common/type/SignedInt128.java common/src/java/org/apache/hadoop/hive/common/type/SignedInt128.java index 9da8cc91b1..49e095232c 100644 --- common/src/java/org/apache/hadoop/hive/common/type/SignedInt128.java +++ common/src/java/org/apache/hadoop/hive/common/type/SignedInt128.java @@ -667,7 +667,7 @@ public void subtractDestructive(SignedInt128 right) { } byte signum = UnsignedInt128.difference(this.mag, right.mag, this.mag); - this.negative = (signum > 0 ? 
this.negative : !this.negative); + this.negative = ((signum > 0) == this.negative); } /** @@ -683,7 +683,7 @@ public void subtractDestructive(SignedInt128 right) { */ public static void multiply(SignedInt128 left, SignedInt128 right, SignedInt128 result) { - if (result == left || result == right) { + if (result.equals(left) || result.equals(right)) { throw new IllegalArgumentException( "result object cannot be left or right operand"); } @@ -768,7 +768,7 @@ public int divideDestructive(int right) { */ public static void divide(SignedInt128 left, SignedInt128 right, SignedInt128 quotient, SignedInt128 remainder) { - if (quotient == left || quotient == right) { + if (quotient.equals(left) || quotient.equals(right)) { throw new IllegalArgumentException( "result object cannot be left or right operand"); } diff --git common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java index 213650c2a5..de788bb587 100644 --- common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java +++ common/src/java/org/apache/hadoop/hive/common/type/TimestampTZUtil.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hive.common.type; -import java.text.DateFormat; -import java.text.SimpleDateFormat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.time.DateTimeException; import java.time.LocalDate; import java.time.LocalTime; @@ -33,9 +34,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public class TimestampTZUtil { private static final Logger LOG = LoggerFactory.getLogger(TimestampTZ.class); @@ -104,7 +102,7 @@ private static String handleSingleDigitHourOffset(String s) { Matcher matcher = SINGLE_DIGIT_PATTERN.matcher(s); if (matcher.find()) { int index = matcher.start() + 1; - s = s.substring(0, index) + "0" + s.substring(index, s.length()); + s = s.substring(0, index) + "0" + s.substring(index); } return s; } diff --git common/src/java/org/apache/hadoop/hive/common/type/UnsignedInt128.java common/src/java/org/apache/hadoop/hive/common/type/UnsignedInt128.java index 361835c685..d5cb4f4ddb 100644 --- common/src/java/org/apache/hadoop/hive/common/type/UnsignedInt128.java +++ common/src/java/org/apache/hadoop/hive/common/type/UnsignedInt128.java @@ -18,14 +18,14 @@ package org.apache.hadoop.hive.common.type; +import org.apache.hive.common.util.Decimal128FastBuffer; + import java.io.Serializable; import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.IntBuffer; import java.util.Arrays; -import org.apache.hive.common.util.Decimal128FastBuffer; - /** * This code was based on code from Microsoft's PolyBase. * @@ -65,7 +65,7 @@ * Int32 elements as little-endian (v[0] is least significant) unsigned * integers. */ - private int[] v = new int[INT_COUNT]; + private final int[] v = new int[INT_COUNT]; /** * Number of leading non-zero elements in {@link #v}. 
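Several guards in Decimal128 and SignedInt128 that read result == left now read Objects.equals(result, left) or result.equals(left). The two are not interchangeable in general: == tests reference identity (the original aliasing check "result object cannot be left or right operand"), while Objects.equals is null-safe value equality that delegates to equals(). A short, self-contained illustration of that difference, using String only because its value-based equals makes the contrast obvious:

import java.util.Objects;

public class IdentityVsEqualsSketch {
    public static void main(String[] args) {
        String a = new String("128");
        String b = new String("128");

        // Reference identity: a and b are distinct objects, so this is false.
        System.out.println(a == b);                      // false

        // Objects.equals is null-safe value equality: true here, because it
        // delegates to String.equals once both arguments are non-null.
        System.out.println(Objects.equals(a, b));        // true
        System.out.println(Objects.equals(null, null));  // true, without an NPE
    }
}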
For example, if the @@ -1790,7 +1790,7 @@ private void shiftRightDestructive(int wordShifts, int bitShiftsInWord, final int roundCarryNoRestoreMask = 1 << 31; final int roundCarryMask = (1 << (bitShiftsInWord - 1)); boolean roundCarry; - int z0 = 0, z1 = 0, z2 = 0, z3 = 0; + int z0, z1 = 0, z2 = 0, z3 = 0; switch (wordShifts) { case 3: @@ -2184,7 +2184,7 @@ private static void scaleDownTenArray8RoundUp(int[] array, short tenScale) { } } - int z4 = 0, z5 = 0, z6 = 0, z7 = 0; // because inverse is scaled 2^128, + int z4, z5, z6, z7; // because inverse is scaled 2^128, // these will become v0-v3 int z8 = 0, z9 = 0, z10 = 0; // for wordshift long product = 0L; @@ -2519,7 +2519,7 @@ private static void fastSerializeIntPartForHiveDecimal(ByteBuffer buf, public int fastSerializeForHiveDecimal(Decimal128FastBuffer scratch, byte signum) { int bufferUsed = this.count; ByteBuffer buf = scratch.getByteBuffer(bufferUsed); - buf.put(0, (byte) (signum == 1 ? 0 : signum)); + buf.put(0, (signum == 1 ? 0 : signum)); int pos = 1; int firstNonZero = 0; while(firstNonZero < this.count && v[firstNonZero] == 0) { @@ -2551,7 +2551,7 @@ public int fastSerializeForHiveDecimal(Decimal128FastBuffer scratch, byte signum * @return */ public byte fastUpdateFromInternalStorage(byte[] internalStorage) { - byte signum = 0; + byte signum; int skip = 0; this.count = 0; // Skip over any leading 0s or 0xFFs diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 53bb342e99..ed3c910e29 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -20,7 +20,6 @@ import com.google.common.base.Joiner; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Iterables; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.FileUtils; @@ -80,17 +79,17 @@ private static final Logger LOG = LoggerFactory.getLogger(HiveConf.class); private static boolean loadMetastoreConfig = false; private static boolean loadHiveServer2Config = false; - private static URL hiveDefaultURL = null; - private static URL hiveSiteURL = null; - private static URL hivemetastoreSiteUrl = null; - private static URL hiveServer2SiteUrl = null; + private static final URL hiveDefaultURL; + private static URL hiveSiteURL; + private static URL hivemetastoreSiteUrl; + private static final URL hiveServer2SiteUrl; private static byte[] confVarByteArray = null; - private static final Map vars = new HashMap(); - private static final Map metaConfs = new HashMap(); - private final List restrictList = new ArrayList(); - private final Set hiddenSet = new HashSet(); + private static final Map vars = new HashMap<>(); + private static final Map metaConfs = new HashMap<>(); + private final List restrictList = new ArrayList<>(); + private final Set hiddenSet = new HashSet<>(); private final List rscList = new ArrayList<>(); private Pattern modWhiteListPattern = null; @@ -407,7 +406,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal * Get a set containing configuration parameter names used by LLAP Server isntances * @return an unmodifiable set containing llap ConfVars */ - public static final Set getLlapDaemonConfVars() { + public static Set getLlapDaemonConfVars() { return llapDaemonVarsSet; } @@ -427,7 +426,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal * with non-null values to this 
list as they will override any values defined * in the underlying Hadoop configuration. */ - public static enum ConfVars { + public enum ConfVars { // QL execution stuff SCRIPTWRAPPER("hive.exec.script.wrapper", null, ""), PLAN("hive.exec.plan", "", ""), @@ -2474,7 +2473,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "in the number of rows filtered by a certain operator, which in turn might lead to overprovision or\n" + "underprovision of resources. This factor is applied to the cardinality estimation of IN clauses in\n" + "filter operators."), - HIVE_STATS_IN_MIN_RATIO("hive.stats.filter.in.min.ratio", (float) 0.0f, + HIVE_STATS_IN_MIN_RATIO("hive.stats.filter.in.min.ratio", 0.0f, "Output estimation of an IN filter can't be lower than this ratio"), HIVE_STATS_UDTF_FACTOR("hive.stats.udtf.factor", (float) 1.0, "UDTFs change the number of rows of the output. A common UDTF is the explode() method that creates\n" + @@ -3862,10 +3861,11 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "Whether we should try to create additional opportunities for dynamic pruning, e.g., considering\n" + "siblings that may not be created by normal dynamic pruning logic.\n" + "Only works when dynamic pruning is enabled."), - TEZ_DYNAMIC_PARTITION_PRUNING_MAX_EVENT_SIZE("hive.tez.dynamic.partition.pruning.max.event.size", 1*1024*1024L, + TEZ_DYNAMIC_PARTITION_PRUNING_MAX_EVENT_SIZE("hive.tez.dynamic.partition.pruning.max.event.size", + 1024 * 1024L, "Maximum size of events sent by processors in dynamic pruning. If this size is crossed no pruning will take place."), - TEZ_DYNAMIC_PARTITION_PRUNING_MAX_DATA_SIZE("hive.tez.dynamic.partition.pruning.max.data.size", 100*1024*1024L, + TEZ_DYNAMIC_PARTITION_PRUNING_MAX_DATA_SIZE("hive.tez.dynamic.partition.pruning.max.data.size", 100 * 1024 * 1024L, "Maximum total data size of events in dynamic pruning."), TEZ_DYNAMIC_SEMIJOIN_REDUCTION("hive.tez.dynamic.semijoin.reduction", true, "When dynamic semijoin is enabled, shuffle joins will perform a leaky semijoin before shuffle. 
This " + @@ -4044,7 +4044,7 @@ private static void populateLlapDaemonVarsSet(Set llapDaemonVarsSetLocal "Enforce that col stats are available, before considering vertex"), LLAP_AUTO_MAX_INPUT("hive.llap.auto.max.input.size", 10*1024*1024*1024L, "Check input size, before considering vertex (-1 disables check)"), - LLAP_AUTO_MAX_OUTPUT("hive.llap.auto.max.output.size", 1*1024*1024*1024L, + LLAP_AUTO_MAX_OUTPUT("hive.llap.auto.max.output.size", 1024 * 1024 * 1024L, "Check output size, before considering vertex (-1 disables check)"), LLAP_SKIP_COMPILE_UDF_CHECK("hive.llap.skip.compile.udf.check", false, "Whether to skip the compile-time check for non-built-in UDFs when deciding whether to\n" + @@ -4803,7 +4803,7 @@ public String getDefaultExpr() { } private Set getValidStringValues() { - if (validator == null || !(validator instanceof StringSet)) { + if (!(validator instanceof StringSet)) { throw new RuntimeException(varname + " does not specify a list of valid values"); } return ((StringSet)validator).getExpected(); @@ -4883,8 +4883,7 @@ public void verifyAndSet(String name, String value) throws IllegalArgumentExcept + "It is not in list of params that are allowed to be modified at runtime"); } } - if (Iterables.any(restrictList, - restrictedVar -> name != null && name.startsWith(restrictedVar))) { + if (restrictList.stream().anyMatch(restrictedVar -> name != null && name.startsWith(restrictedVar))) { throw new IllegalArgumentException("Cannot modify " + name + " at runtime. It is in the list" + " of parameters that can't be modified at runtime or is prefixed by a restricted variable"); } @@ -4900,13 +4899,12 @@ public void verifyAndSet(String name, String value) throws IllegalArgumentExcept } public boolean isHiddenConfig(String name) { - return Iterables.any(hiddenSet, hiddenVar -> name.startsWith(hiddenVar)); + return hiddenSet.stream().anyMatch(name::startsWith); } public static boolean isEncodedPar(String name) { for (ConfVars confVar : HiveConf.ENCODED_CONF) { - ConfVars confVar1 = confVar; - if (confVar1.varname.equals(name)) { + if (confVar.varname.equals(name)) { return true; } } @@ -5011,17 +5009,17 @@ public static long toSizeBytes(String value) { return new String[] {value.substring(0, i), value.substring(i)}; } - private static Set daysSet = ImmutableSet.of("d", "D", "day", "DAY", "days", "DAYS"); - private static Set hoursSet = ImmutableSet.of("h", "H", "hour", "HOUR", "hours", "HOURS"); - private static Set minutesSet = ImmutableSet.of("m", "M", "min", "MIN", "mins", "MINS", + private static final Set daysSet = ImmutableSet.of("d", "D", "day", "DAY", "days", "DAYS"); + private static final Set hoursSet = ImmutableSet.of("h", "H", "hour", "HOUR", "hours", "HOURS"); + private static final Set minutesSet = ImmutableSet.of("m", "M", "min", "MIN", "mins", "MINS", "minute", "MINUTE", "minutes", "MINUTES"); - private static Set secondsSet = ImmutableSet.of("s", "S", "sec", "SEC", "secs", "SECS", + private static final Set secondsSet = ImmutableSet.of("s", "S", "sec", "SEC", "secs", "SECS", "second", "SECOND", "seconds", "SECONDS"); - private static Set millisSet = ImmutableSet.of("ms", "MS", "msec", "MSEC", "msecs", "MSECS", + private static final Set millisSet = ImmutableSet.of("ms", "MS", "msec", "MSEC", "msecs", "MSECS", "millisecond", "MILLISECOND", "milliseconds", "MILLISECONDS"); - private static Set microsSet = ImmutableSet.of("us", "US", "usec", "USEC", "usecs", "USECS", + private static final Set microsSet = ImmutableSet.of("us", "US", "usec", "USEC", "usecs", "USECS", 
"microsecond", "MICROSECOND", "microseconds", "MICROSECONDS"); - private static Set nanosSet = ImmutableSet.of("ns", "NS", "nsec", "NSEC", "nsecs", "NSECS", + private static final Set nanosSet = ImmutableSet.of("ns", "NS", "nsec", "NSEC", "nsecs", "NSECS", "nanosecond", "NANOSECOND", "nanoseconds", "NANOSECONDS"); public static TimeUnit unitFor(String unit, TimeUnit defaultUnit) { unit = unit.trim().toLowerCase(); @@ -5388,7 +5386,7 @@ private void initialize(Class cls) { } if (getBoolVar(HiveConf.ConfVars.HIVECONFVALIDATION)) { - List trimmed = new ArrayList(); + List trimmed = new ArrayList<>(); for (Map.Entry entry : this) { String key = entry.getKey(); if (key == null || !key.startsWith("hive.")) { @@ -5634,7 +5632,7 @@ public ZoneId getLocalTimeZone() { //Take care of conf overrides. //Includes values in ConfVars as well as underlying configuration properties (ie, hadoop) - public static final Map overrides = new HashMap(); + public static final Map overrides = new HashMap<>(); /** * Apply system properties to this object if the property name is defined in ConfVars @@ -5652,7 +5650,7 @@ private void applySystemProperties() { * which have been set using System properties */ public static Map getConfSystemProperties() { - Map systemProperties = new HashMap(); + Map systemProperties = new HashMap<>(); for (ConfVars oneVar : ConfVars.values()) { if (System.getProperty(oneVar.varname) != null) { @@ -5939,17 +5937,17 @@ private static boolean isAllowed(Configuration conf, ConfVars setting) { } public static String getNonMrEngines() { - String result = StringUtils.EMPTY; + StringBuilder result = new StringBuilder(StringUtils.EMPTY); for (String s : ConfVars.HIVE_EXECUTION_ENGINE.getValidStringValues()) { if ("mr".equals(s)) { continue; } - if (!result.isEmpty()) { - result += ", "; + if (result.length() > 0) { + result.append(", "); } - result += s; + result.append(s); } - return result; + return result.toString(); } public static String generateMrDeprecationWarning() { diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java index 2ad5f9ee39..545ba0c47a 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hive.conf; -import com.google.common.collect.Iterables; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; @@ -30,7 +29,6 @@ import java.io.File; import java.util.ArrayList; -import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; @@ -38,6 +36,8 @@ import java.util.Map; import java.util.Set; import java.util.StringTokenizer; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; /** * Hive Configuration utils @@ -54,7 +54,7 @@ * @return */ public static boolean isEmbeddedMetaStore(String msUri) { - return (msUri == null) ? 
true : msUri.trim().isEmpty(); + return (msUri == null) || msUri.trim().isEmpty(); } /** @@ -77,7 +77,7 @@ public static StringBuilder dumpConfig(HiveConf conf) { * @return The list of the configuration values to hide */ public static Set getHiddenSet(Configuration configuration) { - Set hiddenSet = new HashSet(); + Set hiddenSet = new HashSet<>(); String hiddenListStr = HiveConf.getVar(configuration, HiveConf.ConfVars.HIVE_CONF_HIDDEN_LIST); if (hiddenListStr != null) { for (String entry : hiddenListStr.split(",")) { @@ -95,15 +95,14 @@ public static StringBuilder dumpConfig(HiveConf conf) { public static void stripConfigurations(Configuration conf, Set hiddenSet) { // Find all configurations where the key contains any string from hiddenSet - Iterable> matching = - Iterables.filter(conf, confEntry -> { - for (String name : hiddenSet) { - if (confEntry.getKey().startsWith(name)) { - return true; - } - } - return false; - }); + Iterable> matching = StreamSupport.stream(conf.spliterator(), false).filter(confEntry -> { + for (String name : hiddenSet) { + if (confEntry.getKey().startsWith(name)) { + return true; + } + } + return false; + }).collect(Collectors.toList()); // Remove the value of every key found matching matching.forEach(entry -> conf.set(entry.getKey(), StringUtils.EMPTY)); @@ -131,12 +130,7 @@ public static void dumpConfig(Configuration originalConf, StringBuilder sb) { while(configIter.hasNext()) { configVals.add(configIter.next()); } - Collections.sort(configVals, new Comparator>() { - @Override - public int compare(Map.Entry ent, Map.Entry ent2) { - return ent.getKey().compareTo(ent2.getKey()); - } - }); + configVals.sort(Comparator.comparing(Map.Entry::getKey)); for(Map.Entry entry : configVals) { //use get() to make sure variable substitution works if(entry.getKey().toLowerCase().contains("path")) { @@ -210,7 +204,7 @@ public static void updateJobCredentialProviders(Configuration jobConf) { public static String getJobCredentialProviderPassword(Configuration conf) { String jobKeyStoreLocation = conf.get(HiveConf.ConfVars.HIVE_SERVER2_JOB_CREDENTIAL_PROVIDER_PATH.varname); - String password = null; + String password; if(StringUtils.isNotBlank(jobKeyStoreLocation)) { password = System.getenv(Constants.HIVE_SERVER2_JOB_CREDSTORE_PASSWORD_ENVVAR); if (StringUtils.isNotBlank(password)) { diff --git common/src/java/org/apache/hadoop/hive/conf/LoopingByteArrayInputStream.java common/src/java/org/apache/hadoop/hive/conf/LoopingByteArrayInputStream.java index 4887d65a6b..9333bc304e 100644 --- common/src/java/org/apache/hadoop/hive/conf/LoopingByteArrayInputStream.java +++ common/src/java/org/apache/hadoop/hive/conf/LoopingByteArrayInputStream.java @@ -44,13 +44,7 @@ public LoopingByteArrayInputStream(byte[] buf) { this.buf = buf; } - private final ThreadLocal threadLocalByteArrayInputStream = - new ThreadLocal() { - @Override - protected ByteArrayInputStream initialValue() { - return null; - } - }; + private final ThreadLocal threadLocalByteArrayInputStream = ThreadLocal.withInitial(() -> null); private ByteArrayInputStream getByteArrayInputStream() { ByteArrayInputStream bais = threadLocalByteArrayInputStream.get(); diff --git common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java index 695f3ec01a..589cd13b5a 100644 --- common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java +++ common/src/java/org/apache/hadoop/hive/conf/SystemVariables.java @@ -17,10 +17,6 @@ */ package org.apache.hadoop.hive.conf; 
-import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import com.google.common.collect.ImmutableMap; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.valcoersion.JavaIOTmpdirVariableCoercion; @@ -28,10 +24,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + public class SystemVariables { private static final Logger l4j = LoggerFactory.getLogger(SystemVariables.class); - protected static Pattern varPat = Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}"); + protected static final Pattern varPat = Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}"); private static final SystemVariables INSTANCE = new SystemVariables(); private static final Map COERCIONS = ImmutableMap.builder() @@ -106,7 +106,7 @@ protected final String substitute(Configuration conf, String expr, int depth) { } else { found = true; } - builder.append(eval.substring(prev, match.start())).append(substitute); + builder.append(eval, prev, match.start()).append(substitute); prev = match.end(); } if (!found) { diff --git common/src/java/org/apache/hadoop/hive/conf/Validator.java common/src/java/org/apache/hadoop/hive/conf/Validator.java index 1539a35d62..22aeb131ec 100644 --- common/src/java/org/apache/hadoop/hive/conf/Validator.java +++ common/src/java/org/apache/hadoop/hive/conf/Validator.java @@ -41,7 +41,7 @@ class StringSet implements Validator { private final boolean caseSensitive; - private final Set expected = new LinkedHashSet(); + private final Set expected = new LinkedHashSet<>(); public StringSet(String... values) { this(false, values); @@ -55,7 +55,7 @@ public StringSet(boolean caseSensitive, String... values) { } public Set getExpected() { - return new HashSet(expected); + return new HashSet<>(expected); } @Override @@ -80,10 +80,7 @@ protected boolean inRange(String value, Object lower, Object upper) { if (lower != null && ivalue < (Integer)lower) { return false; } - if (upper != null && ivalue > (Integer)upper) { - return false; - } - return true; + return upper == null || ivalue <= (Integer) upper; } }, LONG { @@ -93,10 +90,7 @@ protected boolean inRange(String value, Object lower, Object upper) { if (lower != null && lvalue < (Long)lower) { return false; } - if (upper != null && lvalue > (Long)upper) { - return false; - } - return true; + return upper == null || lvalue <= (Long) upper; } }, FLOAT { @@ -106,10 +100,7 @@ protected boolean inRange(String value, Object lower, Object upper) { if (lower != null && fvalue < (Float)lower) { return false; } - if (upper != null && fvalue > (Float)upper) { - return false; - } - return true; + return upper == null || !(fvalue > (Float) upper); } }; @@ -170,7 +161,7 @@ public String toDescription() { class PatternSet implements Validator { - private final List expected = new ArrayList(); + private final List expected = new ArrayList<>(); public PatternSet(String... values) { for (String value : values) { @@ -343,14 +334,14 @@ private String sizeString(long size) { long current = 1; for (int i = 0; i < units.length && current > 0; ++i) { long next = current << 10; - if ((size & (next - 1)) != 0) return (long)(size / current) + units[i]; + if ((size & (next - 1)) != 0) return (size / current) + units[i]; current = next; } - return current > 0 ? ((long)(size / current) + "Pb") : (size + units[0]); + return current > 0 ? 
((size / current) + "Pb") : (size + units[0]); } } - public class WritableDirectoryValidator implements Validator { + class WritableDirectoryValidator implements Validator { @Override public String validate(String value) { diff --git common/src/java/org/apache/hadoop/hive/conf/valcoersion/JavaIOTmpdirVariableCoercion.java common/src/java/org/apache/hadoop/hive/conf/valcoersion/JavaIOTmpdirVariableCoercion.java index d2a24f31a0..f1ba0c3439 100644 --- common/src/java/org/apache/hadoop/hive/conf/valcoersion/JavaIOTmpdirVariableCoercion.java +++ common/src/java/org/apache/hadoop/hive/conf/valcoersion/JavaIOTmpdirVariableCoercion.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hive.conf.valcoersion; -import java.io.IOException; - import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; @@ -26,6 +24,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; + /** * Enforces absolute paths to be used for the java.io.tmpdir system variable. * @see VariableCoercion diff --git common/src/java/org/apache/hadoop/hive/conf/valcoersion/VariableCoercion.java common/src/java/org/apache/hadoop/hive/conf/valcoersion/VariableCoercion.java index d6e5f080a4..70b2ca0f35 100644 --- common/src/java/org/apache/hadoop/hive/conf/valcoersion/VariableCoercion.java +++ common/src/java/org/apache/hadoop/hive/conf/valcoersion/VariableCoercion.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hive.conf.valcoersion; -import org.apache.hadoop.conf.Configuration; - /** * VariableCoercions are used to enforce rules related to system variables. * These rules may transform the value of system properties returned by the diff --git common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java index 0ee41c0898..31e9812c00 100644 --- common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java +++ common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java @@ -93,11 +93,11 @@ public static final String LOAD_PARTITION = "LoadPartition"; public static final String LOAD_DYNAMIC_PARTITIONS = "LoadDynamicPartitions"; - protected final Map startTimes = new HashMap(); - protected final Map endTimes = new HashMap(); + protected final Map startTimes = new HashMap<>(); + protected final Map endTimes = new HashMap<>(); static final private Logger LOG = LoggerFactory.getLogger(PerfLogger.class.getName()); - protected static final ThreadLocal perfLogger = new ThreadLocal(); + protected static final ThreadLocal perfLogger = new ThreadLocal<>(); private PerfLogger() { @@ -134,7 +134,7 @@ public static void setPerfLogger(PerfLogger resetPerfLogger) { */ public void PerfLogBegin(String callerName, String method) { long startTime = System.currentTimeMillis(); - startTimes.put(method, new Long(startTime)); + startTimes.put(method, startTime); if (LOG.isDebugEnabled()) { LOG.debug(""); } @@ -159,8 +159,8 @@ public long PerfLogEnd(String callerName, String method) { public long PerfLogEnd(String callerName, String method, String additionalInfo) { Long startTime = startTimes.get(method); long endTime = System.currentTimeMillis(); - endTimes.put(method, new Long(endTime)); - long duration = startTime == null ? -1 : endTime - startTime.longValue(); + endTimes.put(method, endTime); + long duration = startTime == null ? 
-1 : endTime - startTime; if (LOG.isDebugEnabled()) { StringBuilder sb = new StringBuilder(" dateFormatLocal = ThreadLocal.withInitial(() -> { + SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + simpleDateFormat.setLenient(false); + simpleDateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); + return simpleDateFormat; + }); public static SimpleDateFormat getDateFormat() { return dateFormatLocal.get(); diff --git common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java index 3900a45994..1bfd6f7237 100644 --- common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java +++ common/src/java/org/apache/hive/common/util/FixedSizedObjectPool.java @@ -17,13 +17,12 @@ */ package org.apache.hive.common.util; -import java.util.concurrent.atomic.AtomicLong; - +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hive.common.Pool; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.annotations.VisibleForTesting; +import java.util.concurrent.atomic.AtomicLong; /** Simple object pool of limited size. Implemented as a lock-free ring buffer; * may fail to produce items if there are too many concurrent users. */ @@ -64,7 +63,9 @@ public Marker(int markerShift, int deltaShift, int rcShift) { this.deltaShift = deltaShift; this.rcShift = rcShift; } - int markerShift, deltaShift, rcShift; + final int markerShift; + final int deltaShift; + final int rcShift; public final long setMarker(long dest, long val) { return setValue(dest, val, markerShift, MARKER_MASK); @@ -90,11 +91,11 @@ public final long getRc(long src) { return getValue(src, rcShift, RC_MASK); } - private final long setValue(long dest, long val, int offset, long mask) { + private long setValue(long dest, long val, int offset, long mask) { return (dest & (~(mask << offset))) + (val << offset); } - private final long getValue(long src, int offset, long mask) { + private long getValue(long src, int offset, long mask) { return (src >>> offset) & mask; } diff --git common/src/java/org/apache/hive/common/util/HashCodeUtil.java common/src/java/org/apache/hive/common/util/HashCodeUtil.java index 700b2e14f0..1fef649b9f 100644 --- common/src/java/org/apache/hive/common/util/HashCodeUtil.java +++ common/src/java/org/apache/hive/common/util/HashCodeUtil.java @@ -46,7 +46,7 @@ public static int calculateLongHashCode(long key) { public static void calculateLongArrayHashCodes(long[] longs, int[] hashCodes, final int count) { for (int v = 0; v < count; v++) { - hashCodes[v] = (int) calculateLongHashCode(longs[v]); + hashCodes[v] = calculateLongHashCode(longs[v]); } } @@ -82,7 +82,7 @@ public static int murmurHash(byte[] data, int offset, int length) { k = k << 8; k = k | (data[i_4 + 1] & 0xff); k = k << 8; - k = k | (data[i_4 + 0] & 0xff); + k = k | (data[i_4] & 0xff); k *= m; k ^= k >>> r; k *= m; diff --git common/src/java/org/apache/hive/common/util/HiveStringUtils.java common/src/java/org/apache/hive/common/util/HiveStringUtils.java index a4923f9f1b..4d5b987cf7 100644 --- common/src/java/org/apache/hive/common/util/HiveStringUtils.java +++ common/src/java/org/apache/hive/common/util/HiveStringUtils.java @@ -18,6 +18,16 @@ package org.apache.hive.common.util; +import com.google.common.base.Splitter; +import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.text.translate.CharSequenceTranslator; +import org.apache.commons.lang3.text.translate.EntityArrays; +import 
org.apache.commons.lang3.text.translate.LookupTranslator; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.classification.InterfaceAudience; +import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.apache.hadoop.io.Text; + import java.io.PrintWriter; import java.io.StringWriter; import java.net.InetAddress; @@ -39,16 +49,6 @@ import java.util.StringTokenizer; import java.util.regex.Pattern; -import com.google.common.base.Splitter; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang3.text.translate.CharSequenceTranslator; -import org.apache.commons.lang3.text.translate.EntityArrays; -import org.apache.commons.lang3.text.translate.LookupTranslator; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.common.classification.InterfaceAudience; -import org.apache.hadoop.hive.common.classification.InterfaceStability; -import org.apache.hadoop.io.Text; - /** * HiveStringUtils * General string utils @@ -110,7 +110,7 @@ public static String intern(String str) { if(list == null) { return null; } - List newList = new ArrayList(list.size()); + List newList = new ArrayList<>(list.size()); for(String str : list) { newList.add(intern(str)); } @@ -131,7 +131,7 @@ public static String intern(String str) { // nothing to intern return map; } - Map newMap = new HashMap(map.size()); + Map newMap = new HashMap<>(map.size()); for(Map.Entry entry : map.entrySet()) { newMap.put(intern(entry.getKey()), intern(entry.getValue())); } @@ -364,7 +364,7 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat, if(values.size() == 0) { return null; } - return values.toArray(new String[values.size()]); + return values.toArray(new String[0]); } /** @@ -373,12 +373,12 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat, * @return an ArrayList of string values */ public static Collection getStringCollection(String str){ - List values = new ArrayList(); + List values = new ArrayList<>(); if (str == null) { return values; } StringTokenizer tokenizer = new StringTokenizer (str,","); - values = new ArrayList(); + values = new ArrayList<>(); while (tokenizer.hasMoreTokens()) { values.add(tokenizer.nextToken()); } @@ -391,8 +391,7 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat, * @return a Collection of String values */ public static Collection getTrimmedStringCollection(String str){ - return new ArrayList( - Arrays.asList(getTrimmedStrings(str))); + return new ArrayList<>(Arrays.asList(getTrimmedStrings(str))); } /** @@ -435,7 +434,7 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat, if (str==null) { return null; } - ArrayList strList = new ArrayList(); + ArrayList strList = new ArrayList<>(); StringBuilder split = new StringBuilder(); int index = 0; while ((index = findNext(str, separator, escapeChar, index, split)) >= 0) { @@ -465,9 +464,9 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat, if ("".equals(str)) { return new String[]{""}; } - ArrayList strList = new ArrayList(); + ArrayList strList = new ArrayList<>(); int startIndex = 0; - int nextIndex = 0; + int nextIndex; while ((nextIndex = str.indexOf((int)separator, startIndex)) != -1) { strList.add(str.substring(startIndex, nextIndex)); startIndex = nextIndex + 1; @@ -745,14 +744,8 @@ public static void startupShutdownMessage(Class clazz, String[] args, ) ); - ShutdownHookManager.addShutdownHook( - new Runnable() { - @Override - public void run() { - 
LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{ - "Shutting down " + classname + " at " + hostname})); - } - }, SHUTDOWN_HOOK_PRIORITY); + ShutdownHookManager.addShutdownHook(() -> LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ", new String[]{ + "Shutting down " + classname + " at " + hostname})), SHUTDOWN_HOOK_PRIORITY); } @@ -771,7 +764,7 @@ public static String getHostname() { * which can be represented by a 64-bit integer. * TraditionalBinaryPrefix symbol are case insensitive. */ - public static enum TraditionalBinaryPrefix { + public enum TraditionalBinaryPrefix { KILO(1024), MEGA(KILO.value << 10), GIGA(MEGA.value << 10), @@ -876,8 +869,8 @@ public static String escapeHTML(String string) { * Return an abbreviated English-language desc of the byte length */ public static String byteDesc(long len) { - double val = 0.0; - String ending = ""; + double val; + String ending; if (len < 1024 * 1024) { val = (1.0 * len) / 1024; ending = " KB"; @@ -928,7 +921,7 @@ public static String join(CharSequence separator, Iterable strings) { * @param separator Separator to join with. */ public static String joinIgnoringEmpty(String[] strings, char separator) { - ArrayList list = new ArrayList(); + ArrayList list = new ArrayList<>(); for(String str : strings) { if (StringUtils.isNotBlank(str)) { list.add(str); @@ -1004,7 +997,6 @@ public static int findTrailingSpaces(byte[] bytes, int start, int length) { for (numSpaces = 0; numSpaces < length; ++numSpaces) { int curPos = start + (length - (numSpaces + 1)); if (isAscii(bytes[curPos]) && Character.isWhitespace(bytes[curPos])) { - continue; } else { break; // non-space character } diff --git common/src/java/org/apache/hive/common/util/HiveTestUtils.java common/src/java/org/apache/hive/common/util/HiveTestUtils.java index 1bea47b073..d338c3c1d8 100644 --- common/src/java/org/apache/hive/common/util/HiveTestUtils.java +++ common/src/java/org/apache/hive/common/util/HiveTestUtils.java @@ -18,6 +18,13 @@ package org.apache.hive.common.util; +import com.google.common.io.Files; +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.hive.common.classification.InterfaceAudience; +import org.apache.hadoop.hive.common.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; @@ -31,14 +38,6 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.hive.common.classification.InterfaceAudience; -import org.apache.hadoop.hive.common.classification.InterfaceStability; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.io.Files; - @InterfaceAudience.Private @InterfaceStability.Unstable public class HiveTestUtils { @@ -59,18 +58,15 @@ public static String getFileFromClasspath(String name) { private static void executeCmd(String[] cmdArr, File dir) throws IOException, InterruptedException { final Process p1 = Runtime.getRuntime().exec(cmdArr, null, dir); - new Thread(new Runnable() { - @Override - public void run() { - BufferedReader input = new BufferedReader(new InputStreamReader(p1.getErrorStream())); - String line; - try { - while ((line = input.readLine()) != null) { - System.out.println(line); - } - } catch (IOException e) { - LOG.error("Failed to execute the command due the exception " + e); + new Thread(() -> { + BufferedReader input = new BufferedReader(new InputStreamReader(p1.getErrorStream())); + 
String line; + try { + while ((line = input.readLine()) != null) { + System.out.println(line); } + } catch (IOException e) { + LOG.error("Failed to execute the command due the exception " + e); } }).start(); p1.waitFor(); @@ -78,7 +74,7 @@ public void run() { public static File genLocalJarForTest(String pathToClazzFile, String clazzName) throws IOException, InterruptedException { - return genLocalJarForTest(pathToClazzFile, clazzName, new HashMap()); + return genLocalJarForTest(pathToClazzFile, clazzName, new HashMap<>()); } public static File genLocalJarForTest(String pathToClazzFile, String clazzName, MapextraContent) diff --git common/src/java/org/apache/hive/common/util/HiveVersionInfo.java common/src/java/org/apache/hive/common/util/HiveVersionInfo.java index 9f033e77f0..7d40b3e1d8 100644 --- common/src/java/org/apache/hive/common/util/HiveVersionInfo.java +++ common/src/java/org/apache/hive/common/util/HiveVersionInfo.java @@ -18,11 +18,11 @@ package org.apache.hive.common.util; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.common.classification.InterfaceStability; import org.apache.hive.common.HiveVersionAnnotation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * HiveVersionInfo. @@ -33,8 +33,8 @@ public class HiveVersionInfo { private static final Logger LOG = LoggerFactory.getLogger(HiveVersionInfo.class); - private static Package myPackage; - private static HiveVersionAnnotation version; + private static final Package myPackage; + private static final HiveVersionAnnotation version; static { myPackage = HiveVersionAnnotation.class.getPackage(); diff --git common/src/java/org/apache/hive/common/util/ProcessUtils.java common/src/java/org/apache/hive/common/util/ProcessUtils.java index 409384fc00..075f58301f 100644 --- common/src/java/org/apache/hive/common/util/ProcessUtils.java +++ common/src/java/org/apache/hive/common/util/ProcessUtils.java @@ -18,18 +18,18 @@ package org.apache.hive.common.util; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.lang.management.ManagementFactory; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * Process related utilities. 
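The shutdown hook in HiveStringUtils and the stream-draining thread in HiveTestUtils.executeCmd both collapse an anonymous Runnable into a lambda; Runnable is a functional interface, so the two forms behave identically. A standalone sketch of the pattern (the printed message is invented):

public class RunnableLambdaSketch {
    public static void main(String[] args) throws InterruptedException {
        // Before: an anonymous Runnable, as removed by the patch.
        Thread oldStyle = new Thread(new Runnable() {
            @Override
            public void run() {
                System.out.println("draining stream on " + Thread.currentThread().getName());
            }
        });

        // After: the same work expressed as a lambda.
        Thread newStyle = new Thread(() ->
                System.out.println("draining stream on " + Thread.currentThread().getName()));

        oldStyle.start();
        newStyle.start();
        oldStyle.join();
        newStyle.join();
    }
}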
*/ public class ProcessUtils { - private static Logger LOG = LoggerFactory.getLogger(ProcessUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(ProcessUtils.class); public static Integer getPid() { // JVM_PID is exported by bin/hive diff --git common/src/java/org/apache/hive/common/util/Ref.java common/src/java/org/apache/hive/common/util/Ref.java index c3f3a2c6ca..0a8a4ad887 100644 --- common/src/java/org/apache/hive/common/util/Ref.java +++ common/src/java/org/apache/hive/common/util/Ref.java @@ -27,6 +27,6 @@ public Ref(T value) { } public static Ref from(T t) { - return new Ref(t); + return new Ref<>(t); } } diff --git common/src/java/org/apache/hive/common/util/ReflectionUtil.java common/src/java/org/apache/hive/common/util/ReflectionUtil.java index 6e3b882b37..84bf7f3d9c 100644 --- common/src/java/org/apache/hive/common/util/ReflectionUtil.java +++ common/src/java/org/apache/hive/common/util/ReflectionUtil.java @@ -18,15 +18,14 @@ package org.apache.hive.common.util; -import java.lang.reflect.Constructor; -import java.lang.reflect.Method; -import java.util.concurrent.TimeUnit; - +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; +import java.lang.reflect.Constructor; +import java.lang.reflect.Method; +import java.util.concurrent.TimeUnit; /** * Same as Hadoop ReflectionUtils, but (1) does not leak classloaders (or shouldn't anyway, we diff --git common/src/java/org/apache/hive/common/util/RetryUtilities.java common/src/java/org/apache/hive/common/util/RetryUtilities.java index 12fe2b7018..63d223cad8 100644 --- common/src/java/org/apache/hive/common/util/RetryUtilities.java +++ common/src/java/org/apache/hive/common/util/RetryUtilities.java @@ -17,8 +17,6 @@ */ package org.apache.hive.common.util; -import java.util.concurrent.Callable; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,7 +36,7 @@ public RetryException(String msg) { /** * Interface used to create a ExponentialBackOffRetry policy */ - public static interface ExponentialBackOffRetry { + public interface ExponentialBackOffRetry { /** * This method should be called by implementations of this ExponentialBackOffRetry policy * It represents the actual work which needs to be done based on a given batch size @@ -46,7 +44,7 @@ public RetryException(String msg) { * @return * @throws Exception */ - public T execute(int batchSize) throws Exception; + T execute(int batchSize) throws Exception; } /** diff --git common/src/java/org/apache/hive/common/util/ShutdownHookManager.java common/src/java/org/apache/hive/common/util/ShutdownHookManager.java index fd12b649bb..495303c55e 100644 --- common/src/java/org/apache/hive/common/util/ShutdownHookManager.java +++ common/src/java/org/apache/hive/common/util/ShutdownHookManager.java @@ -18,17 +18,16 @@ package org.apache.hive.common.util; +import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.fs.FileSystem; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.util.Collections; import java.util.HashSet; import java.util.Set; -import org.apache.hadoop.fs.FileSystem; - -import com.google.common.annotations.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * This is just a wrapper around hadoop's ShutdownHookManager but also manages delete on exit hook 
for temp files. */ @@ -118,7 +117,7 @@ static boolean isRegisteredToDeleteOnExit(File file) { } private static class DeleteOnExitHook implements Runnable { - private final Set deleteTargets = Collections.synchronizedSet(new HashSet()); + private final Set deleteTargets = Collections.synchronizedSet(new HashSet<>()); @Override public void run() { diff --git common/src/java/org/apache/hive/common/util/StreamPrinter.java common/src/java/org/apache/hive/common/util/StreamPrinter.java index 8ff0e76520..58e9a1cb24 100644 --- common/src/java/org/apache/hive/common/util/StreamPrinter.java +++ common/src/java/org/apache/hive/common/util/StreamPrinter.java @@ -18,22 +18,22 @@ package org.apache.hive.common.util; +import org.apache.hadoop.io.IOUtils; + import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintStream; -import org.apache.hadoop.io.IOUtils; - /** * StreamPrinter. * */ public class StreamPrinter extends Thread { - InputStream is; - String type; - PrintStream[] outputStreams; + final InputStream is; + final String type; + final PrintStream[] outputStreams; public StreamPrinter(InputStream is, String type, PrintStream... outputStreams) { this.is = is; @@ -47,7 +47,7 @@ public void run() { try { InputStreamReader isr = new InputStreamReader(is); br = new BufferedReader(isr); - String line = null; + String line; if (type != null) { while ((line = br.readLine()) != null) { for (PrintStream os: outputStreams) { diff --git common/src/java/org/apache/hive/common/util/TimestampParser.java common/src/java/org/apache/hive/common/util/TimestampParser.java index d30ab88892..fee0ea146d 100644 --- common/src/java/org/apache/hive/common/util/TimestampParser.java +++ common/src/java/org/apache/hive/common/util/TimestampParser.java @@ -18,17 +18,11 @@ package org.apache.hive.common.util; -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import org.apache.hadoop.hive.common.type.Timestamp; import org.joda.time.DateTime; +import org.joda.time.DateTimeFieldType; import org.joda.time.LocalDateTime; import org.joda.time.MutableDateTime; -import org.joda.time.DateTimeFieldType; import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; @@ -36,7 +30,11 @@ import org.joda.time.format.DateTimeParser; import org.joda.time.format.DateTimeParserBucket; -import javax.annotation.Nullable; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import static com.google.common.base.Preconditions.checkState; @@ -141,12 +139,8 @@ public Timestamp parseTimestamp(String strValue) throws IllegalArgumentException * DateTimeParser to parse the date string as the millis since Unix epoch */ public static class MillisDateFormatParser implements DateTimeParser { - private static final ThreadLocal numericMatcher = new ThreadLocal() { - @Override - protected Matcher initialValue() { - return Pattern.compile("(-?\\d+)(\\.\\d+)?$").matcher(""); - } - }; + private static final ThreadLocal numericMatcher = + ThreadLocal.withInitial(() -> Pattern.compile("(-?\\d+)(\\.\\d+)?$").matcher("")); private final static DateTimeFieldType[] dateTimeFields = { DateTimeFieldType.year(), diff --git common/src/java/org/apache/hive/http/AdminAuthorizedServlet.java 
common/src/java/org/apache/hive/http/AdminAuthorizedServlet.java index de9b69647a..a2ea44bf15 100644 --- common/src/java/org/apache/hive/http/AdminAuthorizedServlet.java +++ common/src/java/org/apache/hive/http/AdminAuthorizedServlet.java @@ -17,13 +17,12 @@ */ package org.apache.hive.http; -import java.io.IOException; +import org.eclipse.jetty.servlet.DefaultServlet; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - -import org.eclipse.jetty.servlet.DefaultServlet; +import java.io.IOException; /** * General servlet which is admin-authorized. diff --git common/src/java/org/apache/hive/http/ConfServlet.java common/src/java/org/apache/hive/http/ConfServlet.java index 81643ce9bf..03562e4904 100644 --- common/src/java/org/apache/hive/http/ConfServlet.java +++ common/src/java/org/apache/hive/http/ConfServlet.java @@ -17,16 +17,15 @@ */ package org.apache.hive.http; -import java.io.IOException; -import java.io.Writer; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConfUtil; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.conf.HiveConfUtil; +import java.io.IOException; +import java.io.Writer; /** * A servlet to print out the running configuration data. diff --git common/src/java/org/apache/hive/http/HttpServer.java common/src/java/org/apache/hive/http/HttpServer.java index 24c5422a18..75f605bbe9 100644 --- common/src/java/org/apache/hive/http/HttpServer.java +++ common/src/java/org/apache/hive/http/HttpServer.java @@ -18,37 +18,21 @@ package org.apache.hive.http; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.servlet.ServletContext; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import com.google.common.base.Preconditions; - +import com.google.common.base.Splitter; +import com.google.common.base.Strings; +import com.google.common.collect.Sets; import org.apache.commons.lang.StringUtils; import org.apache.commons.math3.util.Pair; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authorize.AccessControlList; -import org.apache.hadoop.hive.common.classification.InterfaceAudience; import org.apache.hadoop.security.http.CrossOriginFilter; import org.apache.hive.http.security.PamAuthenticator; import org.apache.hive.http.security.PamConstraint; @@ -72,9 +56,9 @@ import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.LowResourceMonitor; import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; import 
org.eclipse.jetty.server.handler.ContextHandler.Context; import org.eclipse.jetty.server.handler.ContextHandlerCollection; -import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.FilterMapping; @@ -85,12 +69,25 @@ import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.webapp.WebAppContext; - -import com.google.common.base.Splitter; -import com.google.common.base.Strings; -import com.google.common.collect.Sets; import org.slf4j.LoggerFactory; +import javax.servlet.ServletContext; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; + /** * A simple embedded Jetty server to serve as HS2/HMS web UI. */ @@ -121,7 +118,7 @@ private HttpServer(final Builder b) throws IOException { private int port; private int maxThreads; private HiveConf conf; - private final Map contextAttrs = new HashMap(); + private final Map contextAttrs = new HashMap<>(); private String keyStorePassword; private String keyStorePath; private String spnegoPrincipal; @@ -135,8 +132,7 @@ private HttpServer(final Builder b) throws IOException { private String allowedHeaders; private PamAuthenticator pamAuthenticator; private String contextRootRewriteTarget = "/index.html"; - private final List>> servlets = - new LinkedList>>(); + private final List>> servlets = new LinkedList<>(); public Builder(String name) { Preconditions.checkArgument(name != null && !name.isEmpty(), "Name must be specified"); @@ -248,7 +244,7 @@ public Builder setContextRootRewriteTarget(String contextRootRewriteTarget) { } public Builder addServlet(String endpoint, Class servlet) { - servlets.add(new Pair>(endpoint, servlet)); + servlets.add(new Pair<>(endpoint, servlet)); return this; } } @@ -414,7 +410,7 @@ WebAppContext createWebAppContext(Builder b) { * Secure the web server with kerberos (AuthenticationFilter). 
*/ void setupSpnegoFilter(Builder b) throws IOException { - Map params = new HashMap(); + Map params = new HashMap<>(); params.put("kerberos.principal", SecurityUtil.getServerPrincipal(b.spnegoPrincipal, b.host)); params.put("kerberos.keytab", b.spnegoKeytab); diff --git common/src/java/org/apache/hive/http/JMXJsonServlet.java common/src/java/org/apache/hive/http/JMXJsonServlet.java index 7b2f89e3e1..81fd688675 100644 --- common/src/java/org/apache/hive/http/JMXJsonServlet.java +++ common/src/java/org/apache/hive/http/JMXJsonServlet.java @@ -17,12 +17,10 @@ package org.apache.hive.http; -import java.io.IOException; -import java.io.PrintWriter; -import java.lang.management.ManagementFactory; -import java.lang.reflect.Array; -import java.util.Iterator; -import java.util.Set; +import org.codehaus.jackson.JsonFactory; +import org.codehaus.jackson.JsonGenerator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; @@ -43,11 +41,11 @@ import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - -import org.codehaus.jackson.JsonFactory; -import org.codehaus.jackson.JsonGenerator; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import java.io.IOException; +import java.io.PrintWriter; +import java.lang.management.ManagementFactory; +import java.lang.reflect.Array; +import java.util.Set; /* * This servlet is based off of the JMXProxyServlet from Tomcat 7.0.14. It has @@ -214,15 +212,13 @@ private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, HttpServletResponse response) throws IOException { LOG.debug("Listing beans for "+qry); - Set names = null; + Set names; names = mBeanServer.queryNames(qry, null); jg.writeArrayFieldStart("beans"); - Iterator it = names.iterator(); - while (it.hasNext()) { - ObjectName oname = it.next(); + for (ObjectName oname : names) { MBeanInfo minfo; - String code = ""; + String code; Object attributeinfo = null; try { minfo = mBeanServer.getMBeanInfo(oname); @@ -233,47 +229,41 @@ private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, prs = "modelerType"; code = (String) mBeanServer.getAttribute(oname, prs); } - if (attribute!=null) { + if (attribute != null) { prs = attribute; attributeinfo = mBeanServer.getAttribute(oname, prs); } } catch (AttributeNotFoundException e) { // If the modelerType attribute was not found, the class name is used // instead. - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("getting attribute " + prs + " of " + oname + " threw an exception", e); } catch (MBeanException e) { // The code inside the attribute getter threw an exception so log it, // and fall back on the class name - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("getting attribute " + prs + " of " + oname + " threw an exception", e); } catch (RuntimeException e) { // For some reason even with an MBeanException available to them // Runtime exceptions can still find their way through, so treat them // the same as MBeanException - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); - } catch ( ReflectionException e ) { + LOG.error("getting attribute " + prs + " of " + oname + " threw an exception", e); + } catch (ReflectionException e) { // This happens when the code inside the JMX bean (setter?? 
from the // java docs) threw an exception, so log it and fall back on the // class name - LOG.error("getting attribute " + prs + " of " + oname - + " threw an exception", e); + LOG.error("getting attribute " + prs + " of " + oname + " threw an exception", e); } } catch (InstanceNotFoundException e) { //Ignored for some reason the bean was not found so don't output it continue; - } catch ( IntrospectionException e ) { + } catch (IntrospectionException e) { // This is an internal error, something odd happened with reflection so // log it and don't output the bean. - LOG.error("Problem while trying to process JMX query: " + qry - + " with MBean " + oname, e); + LOG.error("Problem while trying to process JMX query: " + qry + " with MBean " + oname, e); continue; - } catch ( ReflectionException e ) { + } catch (ReflectionException e) { // This happens when the code inside the JMX bean threw an exception, so // log it and don't output the bean. - LOG.error("Problem while trying to process JMX query: " + qry - + " with MBean " + oname, e); + LOG.error("Problem while trying to process JMX query: " + qry + " with MBean " + oname, e); continue; } @@ -283,8 +273,7 @@ private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, jg.writeStringField("modelerType", code); if ((attribute != null) && (attributeinfo == null)) { jg.writeStringField("result", "ERROR"); - jg.writeStringField("message", "No attribute with name " + attribute - + " was found."); + jg.writeStringField("message", "No attribute with name " + attribute + " was found."); jg.writeEndObject(); jg.writeEndArray(); jg.close(); @@ -296,8 +285,8 @@ private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, writeAttribute(jg, attribute, attributeinfo); } else { MBeanAttributeInfo attrs[] = minfo.getAttributes(); - for (int i = 0; i < attrs.length; i++) { - writeAttribute(jg, oname, attrs[i]); + for (MBeanAttributeInfo attr : attrs) { + writeAttribute(jg, oname, attr); } } jg.writeEndObject(); @@ -313,11 +302,11 @@ private void writeAttribute(JsonGenerator jg, ObjectName oname, MBeanAttributeIn if ("modelerType".equals(attName)) { return; } - if (attName.indexOf("=") >= 0 || attName.indexOf(":") >= 0 - || attName.indexOf(" ") >= 0) { + if (attName.contains("=") || attName.contains(":") + || attName.contains(" ")) { return; } - Object value = null; + Object value; try { value = mBeanServer.getAttribute(oname, attName); } catch (RuntimeMBeanException e) { diff --git common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java index b8cbcf3c0a..c2faf5c391 100644 --- common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java +++ common/src/java/org/apache/hive/http/Log4j2ConfiguratorServlet.java @@ -15,18 +15,6 @@ */ package org.apache.hive.http; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.stream.Collectors; - -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LoggerContext; @@ -36,6 +24,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; 
+import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; + /** * A servlet to configure log4j2. *
@@ -153,7 +152,7 @@ public ConfLogger(String logger, String level) { } public String getLogger() { - return logger == null ? logger : logger.trim(); + return logger == null ? null : logger.trim(); } public void setLogger(final String logger) { @@ -161,7 +160,7 @@ public void setLogger(final String logger) { } public String getLevel() { - return level == null ? level : level.trim().toUpperCase(); + return level == null ? null : level.trim().toUpperCase(); } public void setLevel(final String level) { @@ -257,9 +256,7 @@ private void configureLogger(final ConfLoggers confLoggers) { } private void listLoggers(final HttpServletResponse response) throws IOException { - PrintWriter writer = null; - try { - writer = response.getWriter(); + try (PrintWriter writer = response.getWriter()) { ConfLoggers confLoggers = new ConfLoggers(); Collection loggerConfigs = conf.getLoggers().values(); loggerConfigs.forEach(lc -> confLoggers.getLoggers().add(new ConfLogger(lc.getName(), lc.getLevel().toString()))); @@ -269,10 +266,6 @@ private void listLoggers(final HttpServletResponse response) throws IOException LOG.error("Caught an exception while processing Log4j2 configuration request", e); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); return; - } finally { - if (writer != null) { - writer.close(); - } } response.setStatus(HttpServletResponse.SC_OK); } diff --git common/src/java/org/apache/hive/http/ProfileOutputServlet.java common/src/java/org/apache/hive/http/ProfileOutputServlet.java index fdca1f3cb3..0ac5f62b56 100644 --- common/src/java/org/apache/hive/http/ProfileOutputServlet.java +++ common/src/java/org/apache/hive/http/ProfileOutputServlet.java @@ -15,16 +15,15 @@ */ package org.apache.hive.http; -import java.io.File; -import java.io.IOException; +import org.eclipse.jetty.servlet.DefaultServlet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - -import org.eclipse.jetty.servlet.DefaultServlet; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import java.io.File; +import java.io.IOException; /** * Servlet to serve files generated by {@link ProfileServlet} diff --git common/src/java/org/apache/hive/http/ProfileServlet.java common/src/java/org/apache/hive/http/ProfileServlet.java index 48437563b4..7109ca8807 100644 --- common/src/java/org/apache/hive/http/ProfileServlet.java +++ common/src/java/org/apache/hive/http/ProfileServlet.java @@ -15,6 +15,14 @@ */ package org.apache.hive.http; +import com.google.common.base.Joiner; +import org.apache.hive.common.util.ProcessUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import java.io.File; import java.io.IOException; import java.util.ArrayList; @@ -24,16 +32,6 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.hive.common.util.ProcessUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.base.Joiner; - /** * Servlet that runs async-profiler as web-endpoint. * Following options from async-profiler can be specified as query paramater. 
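The listLoggers() change above moves the response writer into a try-with-resources statement instead of closing it in a finally block. Below is a minimal, self-contained sketch of that idiom; the class name and payload are placeholders for illustration and are not part of this patch.

    import java.io.PrintWriter;
    import java.io.StringWriter;
    import java.io.Writer;

    public class WriterExample {
      static void writeResponse(Writer target, String payload) {
        // The writer is closed automatically when the try block exits,
        // normally or via an exception, so no finally block is needed.
        try (PrintWriter writer = new PrintWriter(target)) {
          writer.println(payload);
        }
      }

      public static void main(String[] args) {
        writeResponse(new StringWriter(), "{\"loggers\":[]}");
      }
    }

Any resource implementing AutoCloseable can be managed this way, which is what lets the patch drop the explicit null check and close() call.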
@@ -112,7 +110,7 @@ MEM_BREAKPOINT("mem:breakpoint"), TRACE_TRACEPOINT("trace:tracepoint"),; - private String internalName; + private final String internalName; Event(final String internalName) { this.internalName = internalName; @@ -143,9 +141,9 @@ public static Event fromInternalName(final String name) { JFR } - private Lock profilerLock = new ReentrantLock(); + private final Lock profilerLock = new ReentrantLock(); private Integer pid; - private String asyncProfilerHome; + private final String asyncProfilerHome; private Process process; public ProfileServlet() { diff --git common/src/java/org/apache/hive/http/StackServlet.java common/src/java/org/apache/hive/http/StackServlet.java index 0960cc99d7..7da028a31c 100644 --- common/src/java/org/apache/hive/http/StackServlet.java +++ common/src/java/org/apache/hive/http/StackServlet.java @@ -17,23 +17,22 @@ */ package org.apache.hive.http; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintStream; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServlet; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - /** * A servlet to print out the current stack traces. */ public class StackServlet extends HttpServlet { private static final long serialVersionUID = 1L; - private static ThreadMXBean threadBean = + private static final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); @Override diff --git common/src/java/org/apache/hive/http/security/PamAuthenticator.java common/src/java/org/apache/hive/http/security/PamAuthenticator.java index cbc19dd816..a7f72891ec 100644 --- common/src/java/org/apache/hive/http/security/PamAuthenticator.java +++ common/src/java/org/apache/hive/http/security/PamAuthenticator.java @@ -16,6 +16,7 @@ */ package org.apache.hive.http.security; +import net.sf.jpam.Pam; import org.apache.hadoop.hive.conf.HiveConf; import org.eclipse.jetty.http.HttpHeader; import org.eclipse.jetty.security.ServerAuthException; @@ -34,8 +35,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import net.sf.jpam.Pam; - /* This class authenticates HS2 web UI via PAM. 
To authenticate use diff --git common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java index b135be82a2..b30817a9aa 100644 --- common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java +++ common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java @@ -22,14 +22,15 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; -import org.junit.After; import org.junit.Before; import org.junit.Test; import java.io.IOException; import java.net.URI; -import static org.apache.hadoop.hive.common.BlobStorageUtils.*; +import static org.apache.hadoop.hive.common.BlobStorageUtils.isBlobStorageFileSystem; +import static org.apache.hadoop.hive.common.BlobStorageUtils.isBlobStoragePath; +import static org.apache.hadoop.hive.common.BlobStorageUtils.isBlobStorageScheme; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doReturn; diff --git common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java index b45832ede7..20aa7b9252 100644 --- common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java +++ common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java @@ -18,38 +18,35 @@ package org.apache.hadoop.hive.common; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - +import com.google.common.collect.Sets; +import com.google.common.io.Files; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.shims.HadoopShims; - import org.junit.Assert; import org.junit.Test; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Sets; -import com.google.common.io.Files; +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TestFileUtils { diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java index 2ec470b371..a79462f88f 100644 --- common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java +++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestOp.java @@ -18,14 +18,15 @@ package org.apache.hadoop.hive.common.jsonexplain; -import com.fasterxml.jackson.databind.JsonNode; import 
com.fasterxml.jackson.databind.ObjectMapper; import org.apache.hadoop.hive.common.jsonexplain.tez.TezJsonParser; import org.json.JSONObject; import org.junit.Before; import org.junit.Test; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import static org.junit.Assert.assertEquals; diff --git common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java index 2f21caf188..2122c1130d 100644 --- common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java +++ common/src/test/org/apache/hadoop/hive/common/jsonexplain/TestStage.java @@ -28,7 +28,10 @@ import java.util.List; import java.util.Map; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; public class TestStage { diff --git common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java index 88a4c73006..173be5c07c 100644 --- common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java +++ common/src/test/org/apache/hadoop/hive/common/metrics/MetricsTestUtils.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hive.common.metrics; -import com.codahale.metrics.Meter; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.Assert; @@ -37,8 +36,8 @@ public static final MetricsCategory METER = new MetricsCategory("meters", "count"); static class MetricsCategory { - String category; - String metricsHandle; + final String category; + final String metricsHandle; MetricsCategory(String category, String metricsHandle) { this.category = category; this.metricsHandle = metricsHandle; diff --git common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java index 1d477f6a47..53cb34fd40 100644 --- common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java +++ common/src/test/org/apache/hadoop/hive/common/metrics/TestLegacyMetrics.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hive.common.metrics; -import java.lang.management.ManagementFactory; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; +import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; +import org.apache.hadoop.hive.conf.HiveConf; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import javax.management.Attribute; import javax.management.MBeanAttributeInfo; @@ -29,13 +29,17 @@ import javax.management.MBeanOperationInfo; import javax.management.MBeanServer; import javax.management.ObjectName; +import java.lang.management.ManagementFactory; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; -import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; -import org.apache.hadoop.hive.conf.HiveConf; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; 
+import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; public class TestLegacyMetrics { @@ -78,7 +82,7 @@ public void testMetricsMBean() throws Exception { assertTrue(resetFound); // add metric with a non-null value: - Attribute attr = new Attribute("fooMetric", Long.valueOf(-77)); + Attribute attr = new Attribute("fooMetric", (long) -77); mbs.setAttribute(oname, attr); mBeanInfo = mbs.getMBeanInfo(oname); @@ -100,7 +104,7 @@ public void testMetricsMBean() throws Exception { // check metric value: Object v = mbs.getAttribute(oname, "fooMetric"); - assertEquals(Long.valueOf(-77), v); + assertEquals((long) -77, v); // reset the bean: Object result = mbs.invoke(oname, "reset", new Object[0], new String[0]); @@ -108,7 +112,7 @@ public void testMetricsMBean() throws Exception { // the metric value must be zeroed: v = mbs.getAttribute(oname, "fooMetric"); - assertEquals(Long.valueOf(0), v); + assertEquals(0L, v); } @Test @@ -130,7 +134,7 @@ public void testScopeSingleThread() throws Exception { metrics.endStoredScope(scopeName); assertEquals(Long.valueOf(1), fooScope.getNumCounter()); - final long t1 = fooScope.getTimeCounter().longValue(); + final long t1 = fooScope.getTimeCounter(); assertTrue(t1 > periodMs); assertSame(fooScope, metrics.getStoredScope(scopeName)); @@ -147,16 +151,16 @@ public void testScopeSingleThread() throws Exception { fooScope.reopen(); assertEquals(Long.valueOf(2), fooScope.getNumCounter()); - assertTrue(fooScope.getTimeCounter().longValue() > 2 * periodMs); + assertTrue(fooScope.getTimeCounter() > 2 * periodMs); Thread.sleep(periodMs + 1); // 3rd close: fooScope.close(); assertEquals(Long.valueOf(3), fooScope.getNumCounter()); - assertTrue(fooScope.getTimeCounter().longValue() > 3 * periodMs); + assertTrue(fooScope.getTimeCounter() > 3 * periodMs); Double avgT = (Double) metrics.get("foo.avg_t"); - assertTrue(avgT.doubleValue() > periodMs); + assertTrue(avgT > periodMs); } @Test @@ -167,12 +171,9 @@ public void testScopeConcurrency() throws Exception { ExecutorService executorService = Executors.newFixedThreadPool(threads); for (int i=0; i() { - @Override - public Void call() throws Exception { - testScopeImpl(n); - return null; - } + executorService.submit((Callable) () -> { + testScopeImpl(n); + return null; }); } executorService.shutdown(); @@ -180,9 +181,9 @@ public Void call() throws Exception { fooScope = (LegacyMetrics.LegacyMetricsScope) metrics.getStoredScope(scopeName); assertEquals(Long.valueOf(3 * threads), fooScope.getNumCounter()); - assertTrue(fooScope.getTimeCounter().longValue() > 3 * periodMs * threads); + assertTrue(fooScope.getTimeCounter() > 3 * periodMs * threads); Double avgT = (Double) metrics.get("foo.avg_t"); - assertTrue(avgT.doubleValue() > periodMs); + assertTrue(avgT > periodMs); metrics.endStoredScope(scopeName); } @@ -220,8 +221,8 @@ void testScopeImpl(int n) throws Exception { // 1st close: metrics.endStoredScope(scopeName); // closing of open scope should be ok. 
- assertTrue(fooScope.getNumCounter().longValue() >= 1); - final long t1 = fooScope.getTimeCounter().longValue(); + assertTrue(fooScope.getNumCounter() >= 1); + final long t1 = fooScope.getTimeCounter(); assertTrue(t1 > periodMs); assertSame(fooScope, metrics.getStoredScope(scopeName)); @@ -229,24 +230,24 @@ void testScopeImpl(int n) throws Exception { // opening allowed after closing: metrics.startStoredScope(scopeName); - assertTrue(fooScope.getNumCounter().longValue() >= 1); - assertTrue(fooScope.getTimeCounter().longValue() >= t1); + assertTrue(fooScope.getNumCounter() >= 1); + assertTrue(fooScope.getTimeCounter() >= t1); assertSame(fooScope, metrics.getStoredScope(scopeName)); Thread.sleep(periodMs + 1); // Reopening (close + open) allowed in opened state: fooScope.reopen(); - assertTrue(fooScope.getNumCounter().longValue() >= 2); - assertTrue(fooScope.getTimeCounter().longValue() > 2 * periodMs); + assertTrue(fooScope.getNumCounter() >= 2); + assertTrue(fooScope.getTimeCounter() > 2 * periodMs); Thread.sleep(periodMs + 1); // 3rd close: fooScope.close(); - assertTrue(fooScope.getNumCounter().longValue() >= 3); - assertTrue(fooScope.getTimeCounter().longValue() > 3 * periodMs); + assertTrue(fooScope.getNumCounter() >= 3); + assertTrue(fooScope.getTimeCounter() > 3 * periodMs); Double avgT = (Double) metrics.get("foo.avg_t"); - assertTrue(avgT.doubleValue() > periodMs); + assertTrue(avgT > periodMs); } } diff --git common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java index 1c49d9575f..77858f9aaf 100644 --- common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java +++ common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleMetrics.java @@ -123,13 +123,10 @@ public void testConcurrency() throws Exception { ExecutorService executorService = Executors.newFixedThreadPool(threads); for (int i=0; i< threads; i++) { final int n = i; - executorService.submit(new Callable() { - @Override - public Void call() throws Exception { - MetricsFactory.getInstance().startStoredScope("method2"); - MetricsFactory.getInstance().endStoredScope("method2"); - return null; - } + executorService.submit((Callable) () -> { + MetricsFactory.getInstance().startStoredScope("method2"); + MetricsFactory.getInstance().endStoredScope("method2"); + return null; }); } executorService.shutdown(); @@ -171,7 +168,7 @@ public Object getValue() { public void setValue(int gaugeVal) { this.gaugeVal = gaugeVal; } - }; + } @Test public void testGauge() throws Exception { diff --git common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java index e89a605bb3..1287dd05d5 100644 --- common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java +++ common/src/test/org/apache/hadoop/hive/common/metrics/metrics2/TestCodahaleReportersConf.java @@ -19,7 +19,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import java.lang.reflect.InvocationTargetException; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.hive.common.metrics.MetricsTestUtils; import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; @@ -29,13 +28,14 @@ import org.junit.Test; import java.io.File; +import java.lang.reflect.InvocationTargetException; 
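The executor-based tests above (TestLegacyMetrics, TestCodahaleMetrics) replace anonymous Callable implementations with lambdas, keeping a cast to Callable so the intended submit() overload is selected. A minimal standalone sketch of the same pattern follows; the pool size and task body are illustrative only, not taken from the tests.

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class LambdaSubmitExample {
      public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        for (int i = 0; i < 4; i++) {
          final int n = i;  // captured variables must be effectively final
          // The cast disambiguates submit(Runnable) from submit(Callable),
          // mirroring the cast kept in the test changes above.
          pool.submit((Callable<Void>) () -> {
            System.out.println("task " + n);
            return null;
          });
        }
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
      }
    }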
/** * Unit tests for Codahale reporter config backward compatibility */ public class TestCodahaleReportersConf { - private static File workDir = new File(System.getProperty("test.tmp.dir")); + private static final File workDir = new File(System.getProperty("test.tmp.dir")); private static File jsonReportFile; @After diff --git common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java index 8b907da7a2..ed09e6f052 100644 --- common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java +++ common/src/test/org/apache/hadoop/hive/common/type/HiveDecimalTestBase.java @@ -17,27 +17,25 @@ */ package org.apache.hadoop.hive.common.type; -import java.util.Random; import java.math.BigDecimal; import java.math.BigInteger; - -import org.apache.hadoop.hive.common.type.RandomTypeUtil; +import java.util.Random; // A COPY of the one in storage-api since currently sharing test classes isn't supported in // our build. public class HiveDecimalTestBase { - public static int POUND_FACTOR = 1000; + public static final int POUND_FACTOR = 1000; - public static enum BigDecimalFlavor { + public enum BigDecimalFlavor { NORMAL_RANGE, FRACTIONS_ONLY, NEGATIVE_SCALE, LONG_TAIL } - public static enum BigDecimalPairFlavor { + public enum BigDecimalPairFlavor { RANDOM, NEAR, INVERSE @@ -88,7 +86,6 @@ public BigDecimal randHiveBigDecimalNormalRange(Random r, String digitAlphabet) boolean negated = false; if (r.nextBoolean()) { bigInteger = bigInteger.negate(); - negated = true; } int scale = 0 + r.nextInt(38 + 1); return new BigDecimal(bigInteger, scale); @@ -100,7 +97,6 @@ public BigDecimal randHiveBigDecimalNegativeScale(Random r, String digitAlphabet boolean negated = false; if (r.nextBoolean()) { bigInteger = bigInteger.negate(); - negated = true; } int scale = 0 + (r.nextBoolean() ? 
0 : r.nextInt(38 + 1)); if (r.nextBoolean()) { @@ -117,7 +113,6 @@ public BigDecimal randHiveBigDecimalLongTail(Random r, String digitAlphabet) { boolean negated = false; if (r.nextBoolean()) { bigInteger = bigInteger.negate(); - negated = true; } return new BigDecimal(bigInteger, scale); } @@ -129,7 +124,6 @@ public BigDecimal randHiveBigDecimalFractionsOnly(Random r, String digitAlphabet boolean negated = false; if (r.nextBoolean()) { bigInteger = bigInteger.negate(); - negated = true; } return new BigDecimal(bigInteger, scale); } @@ -158,7 +152,6 @@ public BigInteger randHiveBigInteger(Random r, String digitAlphabet) { boolean negated = false; if (r.nextBoolean()) { bigInteger = bigInteger.negate(); - negated = true; } return bigInteger; } @@ -182,7 +175,7 @@ public boolean isTenPowerBug(String string) { //------------------------------------------------------------------------------------------------ - public static String[] specialDecimalStrings = new String[] { + public static final String[] specialDecimalStrings = new String[] { "0", "1", "-1", @@ -455,7 +448,7 @@ public boolean isTenPowerBug(String string) { "234.79" }; - public static BigDecimal[] specialBigDecimals = stringArrayToBigDecimals(specialDecimalStrings); + public static final BigDecimal[] specialBigDecimals = stringArrayToBigDecimals(specialDecimalStrings); // decimal_1_1.txt public static String[] decimal_1_1_txt = { @@ -533,9 +526,9 @@ public boolean isTenPowerBug(String string) { "1234567890.1234567800" }; - public static String standardAlphabet = "0123456789"; + public static final String standardAlphabet = "0123456789"; - public static String[] sparseAlphabets = new String[] { + public static final String[] sparseAlphabets = new String[] { "0000000000000000000000000000000000000003", "0000000000000000000000000000000000000009", diff --git common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java index fcbda1ec63..d53dd207c1 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestDecimal128.java @@ -17,16 +17,18 @@ */ package org.apache.hadoop.hive.common.type; -import static org.junit.Assert.*; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.Random; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; /** diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java index 941e9e3560..21dfb7c513 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveBaseChar.java @@ -18,12 +18,12 @@ package org.apache.hadoop.hive.common.type; -import java.util.Random; - import junit.framework.TestCase; +import java.util.Random; + public class TestHiveBaseChar extends TestCase { - static Random rnd = new Random(); + static final Random rnd = new Random(); public static int getRandomSupplementaryChar() { int lowSurrogate = 0xDC00 + rnd.nextInt(1024); @@ -66,9 +66,8 @@ public void testStringLength() throws Exception { int strLen = 20; int[] lengths = { 15, 20, 25 }; // Try with supplementary characters - for 
(int idx1 = 0; idx1 < lengths.length; ++idx1) { + for (int curLen : lengths) { // Create random test string - int curLen = lengths[idx1]; String testString = createRandomSupplementaryCharString(curLen); assertEquals(curLen, testString.codePointCount(0, testString.length())); String enforcedString = HiveBaseChar.enforceMaxLength(testString, strLen); @@ -86,8 +85,7 @@ public void testStringLength() throws Exception { public void testGetPaddedValue() { int strLen = 20; int[] lengths = { 15, 20, 25 }; - for (int idx1 = 0; idx1 < lengths.length; ++idx1) { - int curLen = lengths[idx1]; + for (int curLen : lengths) { // Random test string String testString = createRandomSupplementaryCharString(curLen); assertEquals(curLen, testString.codePointCount(0, testString.length())); diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java index fbee547ae3..555926249c 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveChar.java @@ -18,10 +18,15 @@ package org.apache.hadoop.hive.common.type; -import com.google.code.tempusfugit.concurrency.annotations.*; -import com.google.code.tempusfugit.concurrency.*; -import org.junit.*; -import static org.junit.Assert.*; +import com.google.code.tempusfugit.concurrency.ConcurrentRule; +import com.google.code.tempusfugit.concurrency.RepeatingRule; +import com.google.code.tempusfugit.concurrency.annotations.Concurrent; +import com.google.code.tempusfugit.concurrency.annotations.Repeating; +import org.junit.Rule; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; public class TestHiveChar { diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java index 1435339956..ddf45b73bd 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimalOrcSerializationUtils.java @@ -17,22 +17,22 @@ */ package org.apache.hadoop.hive.common.type; -import java.util.Random; +import com.google.code.tempusfugit.concurrency.annotations.Concurrent; +import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; +import org.apache.orc.impl.SerializationUtils; +import org.junit.Assert; +import org.junit.Test; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; +import java.util.Random; -import org.apache.orc.impl.SerializationUtils; -import org.apache.hadoop.hive.common.type.RandomTypeUtil; -import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr; - -import com.google.code.tempusfugit.concurrency.annotations.*; - -import org.junit.*; - -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; public class TestHiveDecimalOrcSerializationUtils extends HiveDecimalTestBase { @@ -116,11 +116,9 @@ private void testSerializationUtilsWriteRead(String string) { // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER bytesExpected " + displayBytes(bytesExpected, 0, bytesExpected.length)); } // Deserialize and check... 
- which = 1; ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes); BigInteger deserializedBigInteger = SerializationUtils.readBigInteger(byteArrayInputStream); - which = 2; ByteArrayInputStream byteArrayInputStreamExpected = new ByteArrayInputStream(bytesExpected); BigInteger deserializedBigIntegerExpected = SerializationUtils.readBigInteger(byteArrayInputStreamExpected); // System.out.println("TEST_FAST_SERIALIZATION_UTILS_WRITE_BIG_INTEGER deserialized equals " + @@ -131,7 +129,6 @@ private void testSerializationUtilsWriteRead(String string) { fail(); } - which = 3; ByteArrayInputStream byteArrayInputStreamRead = new ByteArrayInputStream(bytes); byte[] scratchBytes = new byte[HiveDecimal.SCRATCH_BUFFER_LEN_SERIALIZATION_UTILS_READ]; HiveDecimal readHiveDecimal = @@ -227,7 +224,7 @@ private void doTestSerializationUtilsRead(Random r, BigInteger bigInteger) byteArrayInputStream = new ByteArrayInputStream(bytes); HiveDecimal resultDec = - dec.serializationUtilsRead( + HiveDecimal.serializationUtilsRead( byteArrayInputStream, dec.scale(), scratchBytes); assertTrue(resultDec != null); @@ -260,7 +257,7 @@ private void doTestSerializationUtilsRead(Random r, BigInteger bigInteger) // Now HiveDecimal byteArrayInputStream = new ByteArrayInputStream(bytes); resultDec = - dec.serializationUtilsRead( + HiveDecimal.serializationUtilsRead( byteArrayInputStream, dec.scale(), scratchBytes); assertTrue(resultDec != null); diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java index 2684c13e55..badc1d306a 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalDayTime.java @@ -17,11 +17,17 @@ */ package org.apache.hadoop.hive.common.type; -import org.junit.*; - -import static org.junit.Assert.*; -import com.google.code.tempusfugit.concurrency.annotations.*; -import com.google.code.tempusfugit.concurrency.*; +import com.google.code.tempusfugit.concurrency.ConcurrentRule; +import com.google.code.tempusfugit.concurrency.RepeatingRule; +import com.google.code.tempusfugit.concurrency.annotations.Concurrent; +import com.google.code.tempusfugit.concurrency.annotations.Repeating; +import org.junit.Rule; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; public class TestHiveIntervalDayTime { diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java index 4df50fa1dc..854cca0715 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveIntervalYearMonth.java @@ -17,10 +17,17 @@ */ package org.apache.hadoop.hive.common.type; -import org.junit.*; -import static org.junit.Assert.*; -import com.google.code.tempusfugit.concurrency.annotations.*; -import com.google.code.tempusfugit.concurrency.*; +import com.google.code.tempusfugit.concurrency.ConcurrentRule; +import com.google.code.tempusfugit.concurrency.RepeatingRule; +import com.google.code.tempusfugit.concurrency.annotations.Concurrent; +import com.google.code.tempusfugit.concurrency.annotations.Repeating; +import org.junit.Rule; +import org.junit.Test; + +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; public class TestHiveIntervalYearMonth { diff --git common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java index e675fd47a6..0a68be5cb1 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestHiveVarchar.java @@ -17,20 +17,18 @@ */ package org.apache.hadoop.hive.common.type; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.util.Random; - -import org.junit.Rule; -import org.junit.Test; - import com.google.code.tempusfugit.concurrency.ConcurrentRule; import com.google.code.tempusfugit.concurrency.RepeatingRule; import com.google.code.tempusfugit.concurrency.annotations.Concurrent; import com.google.code.tempusfugit.concurrency.annotations.Repeating; +import org.junit.Rule; +import org.junit.Test; + +import java.util.Random; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; public class TestHiveVarchar { @Rule public ConcurrentRule concurrentRule = new ConcurrentRule(); @@ -40,7 +38,7 @@ public TestHiveVarchar() { super(); } - static Random rnd = new Random(); + static final Random rnd = new Random(); public static int getRandomSupplementaryChar() { int lowSurrogate = 0xDC00 + rnd.nextInt(1024); @@ -78,10 +76,10 @@ public void testStringLength() throws Exception { int strLen = 20; int[] lengths = { 15, 20, 25 }; // Try with supplementary characters - for (int idx1 = 0; idx1 < lengths.length; ++idx1) { + for (int length : lengths) { // Create random test string StringBuilder sb = new StringBuilder(); - int curLen = lengths[idx1]; + int curLen = length; for (int idx2 = 0; idx2 < curLen; ++idx2) { sb.appendCodePoint(getRandomCodePoint(' ')); } diff --git common/src/test/org/apache/hadoop/hive/common/type/TestSignedInt128.java common/src/test/org/apache/hadoop/hive/common/type/TestSignedInt128.java index 98695e8019..4c4c38c93f 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestSignedInt128.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestSignedInt128.java @@ -17,17 +17,17 @@ */ package org.apache.hadoop.hive.common.type; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.math.BigInteger; + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import java.math.BigInteger; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - /** * Testcases for {@link SignedInt128} * diff --git common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java index 9ad550dcde..0867860257 100644 --- common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java +++ common/src/test/org/apache/hadoop/hive/common/type/TestSqlMathUtil.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.hive.common.type; -import static org.junit.Assert.*; import org.junit.Test; +import static org.junit.Assert.assertArrayEquals; + /** * This code was based on code from Microsoft's PolyBase. 
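TestHiveBaseChar and TestHiveVarchar above swap indexed loops over the candidate-length arrays for enhanced-for loops, dropping the index bookkeeping. A minimal sketch of that rewrite, with a placeholder loop body and illustrative values:

    public class ForEachExample {
      public static void main(String[] args) {
        int[] lengths = { 15, 20, 25 };  // echoes the test arrays above; values are illustrative
        // Enhanced-for iterates the array directly, no idx1/curLen pair needed.
        for (int curLen : lengths) {
          System.out.println("testing length " + curLen);
        }
      }
    }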
*/
diff --git common/src/test/org/apache/hadoop/hive/common/type/TestUnsignedInt128.java common/src/test/org/apache/hadoop/hive/common/type/TestUnsignedInt128.java
index c711d5beb6..6319f19906 100644
--- common/src/test/org/apache/hadoop/hive/common/type/TestUnsignedInt128.java
+++ common/src/test/org/apache/hadoop/hive/common/type/TestUnsignedInt128.java
@@ -17,14 +17,17 @@
*/
package org.apache.hadoop.hive.common.type;
-import static org.junit.Assert.*;
-
-import java.math.BigInteger;
-
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import java.math.BigInteger;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
/**
* Testcases for {@link UnsignedInt128}
*
diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java
index bb23882c3b..b9e615de33 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveAsyncLogging.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hive.conf;
+import junit.framework.TestCase;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.logging.log4j.LogManager;
@@ -26,8 +27,6 @@
import org.apache.logging.log4j.core.selector.ContextSelector;
import org.junit.Test;
-import junit.framework.TestCase;
-
public class TestHiveAsyncLogging extends TestCase {
// this test requires disruptor jar in classpath
diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index 780a708dab..a137a3c37d 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -18,10 +18,9 @@
package org.apache.hadoop.hive.conf;
import com.google.common.collect.Lists;
-import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.mapred.JobConf;
import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Assert;
import org.junit.Test;
diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java
index eb2f9647a2..9f3ca47db5 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConfRestrictList.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.hive.conf;
import junit.framework.TestCase;
-
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.junit.Test;
diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java
index 76bee5fa49..28edbcc697 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveConfUtil.java
@@ -28,7 +28,7 @@
*/
public class TestHiveConfUtil {
- private HiveConf conf = new HiveConf();
+ private final HiveConf conf = new HiveConf();
@Before public void init() {
diff --git common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
index f9c3283f04..f7a403ca06 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hive.conf;
-import java.io.File;
-
+import junit.framework.TestCase;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.common.util.HiveTestUtils;
-import junit.framework.TestCase;
+import java.io.File;
/**
* TestHiveLogging
diff --git common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java
index 6004aba0eb..0332975e4c 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestSystemVariables.java
@@ -23,7 +23,6 @@
import org.junit.Test;
import static junit.framework.TestCase.assertEquals;
-import static junit.framework.TestCase.assertNull;
public class TestSystemVariables {
public static final String SYSTEM = "system";
diff --git common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java
index 683736f0bf..0ee47fe257 100644
--- common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java
+++ common/src/test/org/apache/hadoop/hive/conf/TestVariableSubstitution.java
@@ -40,19 +40,11 @@ private static LocalMySource getMySource() {
return localSource.get();
}
- private static ThreadLocal<LocalMySource> localSource = new ThreadLocal<LocalMySource>() {
- @Override protected LocalMySource initialValue() {
- return new LocalMySource();
- }
- };
+ private static final ThreadLocal<LocalMySource> localSource = ThreadLocal.withInitial(LocalMySource::new);
@Test public void testVariableSource() throws InterruptedException {
final VariableSubstitution variableSubstitution =
- new VariableSubstitution(new HiveVariableSource() {
- @Override public Map<String, String> getHiveVariable() {
- return TestVariableSubstitution.getMySource().map;
- }
- });
+ new VariableSubstitution(() -> TestVariableSubstitution.getMySource().map);
String v = variableSubstitution.substitute(new HiveConf(), "${a}");
Assert.assertEquals("${a}", v);
diff --git common/src/test/org/apache/hive/common/util/MockFileSystem.java common/src/test/org/apache/hive/common/util/MockFileSystem.java
index 1a0b41902d..3f3ddb34c7 100644
--- common/src/test/org/apache/hive/common/util/MockFileSystem.java
+++ common/src/test/org/apache/hive/common/util/MockFileSystem.java
@@ -18,19 +18,6 @@
package org.apache.hive.common.util;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -46,15 +33,28 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Progressable;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
public class MockFileSystem extends FileSystem {
- final List files = new ArrayList();
+ final List files = new ArrayList<>();
final Map fileStatusMap = new HashMap<>();
Path workingDir = new Path("/");
// statics for when the mock fs is created via FileSystem.get
private static String blockedUgi = null;
- private final static List globalFiles = new ArrayList();
+ private final static List globalFiles = new ArrayList<>();
protected Statistics statistics;
- public boolean allowDelete = false;
+ public final boolean allowDelete = false;
public MockFileSystem() {
// empty
@@ -186,7 +186,7 @@ public boolean delete(Path path, boolean isRecursive) throws IOException {
public RemoteIterator listLocatedStatus(final Path f) throws IOException {
return new RemoteIterator() {
- private Iterator iterator = listLocatedFileStatuses(f).iterator();
+ private final Iterator iterator = listLocatedFileStatuses(f).iterator();
@Override public boolean hasNext() throws IOException {
@@ -207,7 +207,7 @@ public LocatedFileStatus next() throws IOException {
List result = new ArrayList<>();
String pathname = path.toString();
String pathnameAsDir = pathname + "/";
- Set dirs = new TreeSet();
+ Set dirs = new TreeSet<>();
MockFile file = findFile(path);
if (file != null) {
result.add(createLocatedStatus(file));
@@ -227,10 +227,10 @@ public LocatedFileStatus next() throws IOException {
statistics.incrementReadOps(1);
checkAccess();
path = path.makeQualified(this);
- List result = new ArrayList();
+ List result = new ArrayList<>();
String pathname = path.toString();
String pathnameAsDir = pathname + "/";
- Set dirs = new TreeSet();
+ Set dirs = new TreeSet<>();
MockFile file = findFile(path);
if (file != null) {
return new FileStatus[]{createStatus(file)};
@@ -374,7 +374,7 @@ public FileStatus getFileStatus(Path path) throws IOException {
statistics.incrementReadOps(1);
}
checkAccess();
- List result = new ArrayList();
+ List result = new ArrayList<>();
MockFile file = findFile(stat.getPath());
if (file != null) {
for(MockBlock block: file.blocks) {
@@ -473,11 +473,11 @@ public String toString() {
public static class MockFile {
public final Path path;
- public int blockSize;
+ public final int blockSize;
public int length;
public MockBlock[] blocks;
public byte[] content;
- public boolean cannotDelete = false;
+ public final boolean cannotDelete = false;
// This is purely for testing convenience; has no bearing on FS operations such as list.
public boolean isDeleted = false;
diff --git common/src/test/org/apache/hive/common/util/TestACLConfigurationParser.java common/src/test/org/apache/hive/common/util/TestACLConfigurationParser.java
index 166f18d111..6ae0b9d8fc 100644
--- common/src/test/org/apache/hive/common/util/TestACLConfigurationParser.java
+++ common/src/test/org/apache/hive/common/util/TestACLConfigurationParser.java
@@ -18,12 +18,12 @@
package org.apache.hive.common.util;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
import org.apache.hadoop.conf.Configuration;
import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
public class TestACLConfigurationParser {
diff --git common/src/test/org/apache/hive/common/util/TestDateParser.java common/src/test/org/apache/hive/common/util/TestDateParser.java
index 8c3a7a4fee..88b101cd99 100644
--- common/src/test/org/apache/hive/common/util/TestDateParser.java
+++ common/src/test/org/apache/hive/common/util/TestDateParser.java
@@ -17,14 +17,17 @@
*/
package org.apache.hive.common.util;
-import static org.junit.Assert.*;
-
import org.apache.hadoop.hive.common.type.Date;
import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
public class TestDateParser {
- DateParser parser = new DateParser();
- Date date = new Date();
+ final DateParser parser = new DateParser();
+ final Date date = new Date();
void checkValidCase(String strValue, Date expected) {
Date dateValue = parser.parseDate(strValue);
diff --git common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java
index b026e54424..8ab0414f2b 100644
--- common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java
+++ common/src/test/org/apache/hive/common/util/TestFixedSizedObjectPool.java
@@ -17,7 +17,10 @@
*/
package org.apache.hive.common.util;
-import static org.junit.Assert.*;
+import org.apache.hadoop.hive.common.Pool;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashSet;
@@ -27,11 +30,9 @@
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
-import org.apache.hive.common.util.FixedSizedObjectPool;
-import org.apache.hadoop.hive.common.Pool;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
public class TestFixedSizedObjectPool {
@@ -187,15 +188,15 @@ public void testMTTImpl(int size, int takerCount, int giverCount) {
int ti = 0;
for (int i = 0; i < takerCount; ++i, ++ti) {
takers[i] = new TakeRunnable(pool, cdlIn, cdlOut, TAKECOUNT);
- tasks[ti] = new FutureTask(takers[i], null);
+ tasks[ti] = new FutureTask<>(takers[i], null);
executor.execute(tasks[ti]);
}
for (int i = 0; i < giverCount; ++i, ++ti) {
givers[i] = new OfferRunnable(pool, cdlIn, cdlOut, GIVECOUNT);
- tasks[ti] = new FutureTask(givers[i], null);
+ tasks[ti] = new FutureTask<>(givers[i], null);
executor.execute(tasks[ti]);
}
- long time = 0;
+ long time;
try {
cdlIn.await(); // Wait for all threads to be ready.
time = System.nanoTime();
diff --git common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
index 5783a1f877..6141417632 100644
--- common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
+++ common/src/test/org/apache/hive/common/util/TestHiveStringUtils.java
@@ -18,12 +18,14 @@
package org.apache.hive.common.util;
-import static org.apache.hive.common.util.HiveStringUtils.removeComments;
-import static org.junit.Assert.*;
+import org.junit.Test;
import java.util.Arrays;
-import org.junit.Test;
+import static org.apache.hive.common.util.HiveStringUtils.removeComments;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
public class TestHiveStringUtils {
@Test
diff --git common/src/test/org/apache/hive/common/util/TestRetryUtilities.java common/src/test/org/apache/hive/common/util/TestRetryUtilities.java
index b786bd193d..228ba7b8fa 100644
--- common/src/test/org/apache/hive/common/util/TestRetryUtilities.java
+++ common/src/test/org/apache/hive/common/util/TestRetryUtilities.java
@@ -17,14 +17,14 @@
*/
package org.apache.hive.common.util;
-import java.util.ArrayList;
-import java.util.List;
-
import org.apache.hive.common.util.RetryUtilities.ExponentiallyDecayingBatchWork;
import org.apache.hive.common.util.RetryUtilities.RetryException;
import org.junit.Assert;
import org.junit.Test;
+import java.util.ArrayList;
+import java.util.List;
+
public class TestRetryUtilities {
private class DummyExponentiallyDecayingBatchWork extends ExponentiallyDecayingBatchWork {
@@ -41,7 +41,7 @@ public DummyExponentiallyDecayingBatchWork(int batchSize, int reducingFactor,
}
final List batchSizes = new ArrayList<>();
- int exceptionCount = 0;
+ int exceptionCount;
@Override public Void execute(int size) throws Exception {
diff --git common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
index c4d0bdba2d..9735da4349 100644
--- common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
+++ common/src/test/org/apache/hive/common/util/TestShutdownHookManager.java
@@ -18,14 +18,13 @@
package org.apache.hive.common.util;
+import org.apache.hadoop.hive.common.FileUtils;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
-import org.apache.hadoop.hive.common.FileUtils;
-
/**
* TestShutdownHookManager.
*
diff --git common/src/test/org/apache/hive/common/util/TestTimestampParser.java common/src/test/org/apache/hive/common/util/TestTimestampParser.java
index 00a7904ecf..63006d07b3 100644
--- common/src/test/org/apache/hive/common/util/TestTimestampParser.java
+++ common/src/test/org/apache/hive/common/util/TestTimestampParser.java
@@ -18,16 +18,16 @@
package org.apache.hive.common.util;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
import org.apache.hadoop.hive.common.type.Timestamp;
import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
public class TestTimestampParser {
public static class ValidTimestampCase {
- String strValue;
- Timestamp expectedValue;
+ final String strValue;
+ final Timestamp expectedValue;
public ValidTimestampCase(String strValue, Timestamp expectedValue) {
this.strValue = strValue;
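// The TestVariableSubstitution hunk above swaps an anonymous ThreadLocal subclass for
// ThreadLocal.withInitial and an anonymous HiveVariableSource for a lambda. A minimal,
// self-contained sketch of that pattern follows; MySource, Source, and the field names
// are hypothetical stand-ins for illustration, not the Hive classes themselves.
import java.util.HashMap;
import java.util.Map;

class ThreadLocalRefactorSketch {
  static class MySource {                        // hypothetical stand-in for LocalMySource
    final Map<String, String> map = new HashMap<>();
  }

  // Before: anonymous subclass overriding initialValue()
  static final ThreadLocal<MySource> BEFORE = new ThreadLocal<MySource>() {
    @Override protected MySource initialValue() {
      return new MySource();
    }
  };

  // After: withInitial(Supplier) lets a constructor reference supply the initial value
  static final ThreadLocal<MySource> AFTER = ThreadLocal.withInitial(MySource::new);

  // The lambda half of the change: any interface with a single abstract method can be
  // implemented inline, which is what the HiveVariableSource replacement does.
  interface Source {                             // hypothetical single-method interface
    Map<String, String> vars();
  }

  static final Source LAMBDA_SOURCE = () -> AFTER.get().map;

  public static void main(String[] args) {
    BEFORE.get().map.put("a", "1");
    AFTER.get().map.put("a", "1");
    // Both thread-locals were initialized lazily for this thread and now hold {a=1}
    System.out.println(BEFORE.get().map.equals(LAMBDA_SOURCE.vars())); // true
  }
}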
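// Several hunks in this patch also replace "import static org.junit.Assert.*;" with explicit
// static imports. A small illustration of the resulting style in an ordinary JUnit 4 test;
// the class name and asserted values are made up for the example.
import org.junit.Test;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

public class ExplicitStaticImportExample {
  @Test
  public void addsSmallNumbers() {
    // Each assertion names exactly which Assert member it uses, so the imports stay explicit
    assertEquals(4, 2 + 2);
    assertArrayEquals(new int[] {1, 2}, new int[] {1, 2});
  }
}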