diff --git a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java
index 5d475f4..50dc7df 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JvmPauseMonitor.java
@@ -37,6 +37,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 
 /**
  * Based on the JvmPauseMonitor from Hadoop.
@@ -172,7 +173,7 @@ public String toString() {
   private class Monitor implements Runnable {
     @Override
     public void run() {
-      Stopwatch sw = new Stopwatch();
+      Stopwatch sw = Stopwatch.createStarted();
       Map<String, GcTimes> gcTimesBeforeSleep = getGcTimes();
       while (shouldRun) {
         sw.reset().start();
@@ -181,7 +182,7 @@ public void run() {
        } catch (InterruptedException ie) {
          return;
        }
-      long extraSleepTime = sw.elapsedMillis() - SLEEP_INTERVAL_MS;
+      long extraSleepTime = sw.elapsed(TimeUnit.MILLISECONDS) - SLEEP_INTERVAL_MS;
       Map<String, GcTimes> gcTimesAfterSleep = getGcTimes();
 
       if (extraSleepTime > warnThresholdMs) {
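The Stopwatch churn in this file and in TaskRunnerCallable below tracks a Guava API break behind the version bump: Guava 16 no longer has Stopwatch.elapsedMillis() and deprecates the public constructors, leaving the createStarted()/createUnstarted() factories plus elapsed(TimeUnit). A minimal sketch of the migrated pattern (illustrative code, not from Hive; the class name and sleep value are made up):

    import java.util.concurrent.TimeUnit;
    import com.google.common.base.Stopwatch;

    public class StopwatchMigrationExample {
      public static void main(String[] args) throws InterruptedException {
        Stopwatch sw = Stopwatch.createStarted();           // was: new Stopwatch()
        Thread.sleep(500);
        long elapsedMs = sw.elapsed(TimeUnit.MILLISECONDS); // was: sw.elapsedMillis()
        System.out.println("extra sleep time: " + (elapsedMs - 500) + " ms");
      }
    }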
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
index 87bd5c8..5e71d4d 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
@@ -17,19 +17,12 @@
  */
 package org.apache.hadoop.hive.llap.daemon.impl;
 
-import java.net.InetSocketAddress;
-import java.nio.ByteBuffer;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Stack;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Stopwatch;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+import com.google.common.util.concurrent.FutureCallback;
+import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.llap.daemon.FragmentCompletionHandler;
@@ -50,7 +43,6 @@
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.log4j.MDC;
 import org.apache.log4j.NDC;
 import org.apache.tez.common.CallableWithNdc;
@@ -58,10 +50,7 @@
 import org.apache.tez.common.security.JobTokenIdentifier;
 import org.apache.tez.common.security.TokenCache;
 import org.apache.tez.dag.api.TezConstants;
-import org.apache.tez.dag.records.TezDAGID;
 import org.apache.tez.dag.records.TezTaskAttemptID;
-import org.apache.tez.dag.records.TezTaskID;
-import org.apache.tez.dag.records.TezVertexID;
 import org.apache.tez.hadoop.shim.HadoopShim;
 import org.apache.tez.runtime.api.ExecutionContext;
 import org.apache.tez.runtime.api.impl.TaskSpec;
@@ -75,15 +64,17 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Joiner;
-import com.google.common.base.Stopwatch;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
-import com.google.common.util.concurrent.FutureCallback;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Stack;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 
 /**
  *
@@ -116,8 +107,8 @@
   private final String queryId;
   private final HadoopShim tezHadoopShim;
   private boolean shouldRunTask = true;
-  final Stopwatch runtimeWatch = new Stopwatch();
-  final Stopwatch killtimerWatch = new Stopwatch();
+  final Stopwatch runtimeWatch = Stopwatch.createStarted();
+  final Stopwatch killtimerWatch = Stopwatch.createStarted();
   private final AtomicBoolean isStarted = new AtomicBoolean(false);
   private final AtomicBoolean isCompleted = new AtomicBoolean(false);
   private final AtomicBoolean killInvoked = new AtomicBoolean(false);
@@ -265,7 +256,7 @@ public LlapTaskUmbilicalProtocol run() throws Exception {
     } finally {
       FileSystem.closeAllForUGI(taskUgi);
       LOG.info("ExecutionTime for Container: " + request.getContainerIdString() + "=" +
-          runtimeWatch.stop().elapsedMillis());
+          runtimeWatch.stop().elapsed(TimeUnit.MILLISECONDS));
       if (LOG.isDebugEnabled()) {
         LOG.debug(
             "canFinish post completion: " + taskSpec.getTaskAttemptID() + ": " + canFinish());
@@ -458,14 +449,14 @@ public void onSuccess(TaskRunner2Result result) {
         LOG.info("Killed task {}", requestId);
         if (killtimerWatch.isRunning()) {
           killtimerWatch.stop();
-          long elapsed = killtimerWatch.elapsedMillis();
+          long elapsed = killtimerWatch.elapsed(TimeUnit.MILLISECONDS);
           LOG.info("Time to die for task {}", elapsed);
           if (metrics != null) {
             metrics.addMetricsPreemptionTimeToKill(elapsed);
           }
         }
         if (metrics != null) {
-          metrics.addMetricsPreemptionTimeLost(runtimeWatch.elapsedMillis());
+          metrics.addMetricsPreemptionTimeLost(runtimeWatch.elapsed(TimeUnit.MILLISECONDS));
           metrics.incrExecutorTotalKilled();
         }
         break;
diff --git a/pom.xml b/pom.xml
index 3fc35bc..b401816 100644
--- a/pom.xml
+++ b/pom.xml
@@ -137,7 +137,7 @@
     <dropwizard.version>3.1.0</dropwizard.version>
     <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
     <druid.version>0.9.1.1</druid.version>
-    <guava.version>14.0.1</guava.version>
+    <guava.version>16.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
     <hadoop.version>2.7.2</hadoop.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
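One semantic note on the Stopwatch hunks: new Stopwatch() returned an unstarted watch, so Stopwatch.createUnstarted() is the exact equivalent; createStarted() is already running when the initializer completes, and calling start() on a running watch throws IllegalStateException. JvmPauseMonitor is safe because its loop calls sw.reset().start() before each measurement, but any watch that surrounding code starts explicitly later would need createUnstarted() instead. A short sketch of the difference (illustrative, not Hive code; the class name is made up):

    import com.google.common.base.Stopwatch;

    public class StopwatchSemantics {
      public static void main(String[] args) {
        Stopwatch unstarted = Stopwatch.createUnstarted(); // exact replacement for new Stopwatch()
        unstarted.start();                                 // fine: the watch was not running

        Stopwatch started = Stopwatch.createStarted();     // already running at this point
        started.reset().start();                           // fine: reset() stops and zeroes it
        // started.start(); // throws IllegalStateException: the watch is already running
      }
    }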
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
index 178a2de..27cb36d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
@@ -18,21 +18,14 @@
 
 package org.apache.hadoop.hive.ql.hooks;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.google.common.base.Charsets;
+import com.google.common.collect.Lists;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+import com.google.gson.stream.JsonWriter;
 import org.apache.commons.collections.SetUtils;
 import org.apache.commons.io.output.StringBuilderWriter;
 import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -50,11 +43,18 @@
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
-import com.google.gson.stream.JsonWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 /**
  * Implementation of a post execute hook that logs lineage info to a log file.
@@ -462,7 +462,7 @@ private void writeVertices(JsonWriter writer, Set<Vertex> vertices) throws IOExc
    */
   private String getQueryHash(String queryStr) {
     Hasher hasher = Hashing.md5().newHasher();
-    hasher.putString(queryStr);
+    hasher.putString(queryStr, Charsets.UTF_8);
     return hasher.hash().toString();
  }
}
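The putString() change is a behavior change as well as a compile fix: Guava's old charset-less Hasher.putString(CharSequence) hashed the string's UTF-16 code units (that behavior survives in newer Guava as putUnencodedChars), while putString(queryStr, Charsets.UTF_8) hashes the UTF-8 encoding. The two byte streams differ, so query hashes computed after this patch will generally not match ones recorded before it. A sketch of the difference (illustrative, not Hive code; the class name and query string are made up):

    import com.google.common.base.Charsets;
    import com.google.common.hash.Hashing;

    public class HasherMigrationExample {
      public static void main(String[] args) {
        String query = "SELECT 1";
        // Old behavior: hash the UTF-16 code units of the string.
        String utf16Hash = Hashing.md5().newHasher()
            .putUnencodedChars(query)
            .hash().toString();
        // New behavior: hash the UTF-8 bytes of the string.
        String utf8Hash = Hashing.md5().newHasher()
            .putString(query, Charsets.UTF_8)
            .hash().toString();
        System.out.println(utf16Hash.equals(utf8Hash)); // prints false
      }
    }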