commit 7dd5e1ce2f2f5de1ab26871184288970b6f10936
Author: Bharath Krishna
Date:   Mon Oct 15 10:16:35 2018 -0700

    HIVE-20488 : SparkSubmitSparkClient#launchDriver should parse exceptions, not just errors

diff --git spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
index d3cb3dd7a1c626d73afe5790d9c3b988fc1e84ae..b434d8f7b7a3b585183cd842bd9893d00a85da1b 100644
--- spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
+++ spark-client/src/main/java/org/apache/hive/spark/client/SparkClientUtilities.java
@@ -19,6 +19,7 @@
 package org.apache.hive.spark.client;
 
 import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 
 import java.io.File;
@@ -49,7 +50,7 @@
   public static final String HIVE_KRYO_REG_NAME = "org.apache.hive.spark.HiveKryoRegistrator";
   private static final String HIVE_KRYO_REG_JAR_NAME = "hive-kryo-registrator";
-
+  private static final ImmutableList<String> ERROR_KEYWORDS = ImmutableList.of("error", "exception");
   /**
    * Add new elements to the classpath.
    *
@@ -186,4 +187,8 @@ public static void addJarToContextLoader(File jar) throws MalformedURLException
       Thread.currentThread().setContextClassLoader(newLoader);
     }
   }
+
+  public static boolean containsErrorKeyword(String line) {
+    return ERROR_KEYWORDS.stream().anyMatch(x -> StringUtils.containsIgnoreCase(line, x));
+  }
 }
diff --git spark-client/src/main/java/org/apache/hive/spark/client/SparkSubmitSparkClient.java spark-client/src/main/java/org/apache/hive/spark/client/SparkSubmitSparkClient.java
index 1879829700198d4702a6b7a8ab7b8717f4d4151c..f42fee2606c329105c608effe8e33fd9cc94c728 100644
--- spark-client/src/main/java/org/apache/hive/spark/client/SparkSubmitSparkClient.java
+++ spark-client/src/main/java/org/apache/hive/spark/client/SparkSubmitSparkClient.java
@@ -34,8 +34,6 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-import org.apache.commons.lang3.StringUtils;
-
 import org.apache.hadoop.hive.common.log.LogRedirector;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -44,6 +42,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hive.spark.client.SparkClientUtilities.containsErrorKeyword;
 
 /**
  * Extends the {@link AbstractSparkClient} and launches a child process to run Spark's {@code
@@ -228,7 +227,7 @@ private String getSparkJobCredentialProviderPassword() {
     List<String> errorMessages = new ArrayList<>();
     synchronized (childErrorLog) {
       for (String line : childErrorLog) {
-        if (StringUtils.containsIgnoreCase(line, "Error")) {
+        if (containsErrorKeyword(line)) {
           errorMessages.add("\"" + line + "\"");
         }
       }
diff --git spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
index 996b24ed7f0bdd04311c8921646d066cd99e55da..0e1557e1b0976e5f7f05c255ace9ac80fa119bea 100644
--- spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
+++ spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
@@ -301,6 +301,17 @@ public void call(SparkClient client) throws Exception {
     });
   }
 
+  @Test
+  public void testErrorParsing() {
+    assertTrue(SparkClientUtilities.containsErrorKeyword("Error.. Test"));
+    assertTrue(SparkClientUtilities.containsErrorKeyword("This line has error.."));
+    assertTrue(SparkClientUtilities.containsErrorKeyword("Test that line has ExcePtion.."));
+    assertTrue(SparkClientUtilities.containsErrorKeyword("Here is eRRor in line.."));
+    assertTrue(SparkClientUtilities.containsErrorKeyword("Here is ExceptioNn in line.."));
+    assertTrue(SparkClientUtilities.containsErrorKeyword("Here is ERROR and Exception in line.."));
+    assertFalse(SparkClientUtilities.containsErrorKeyword("No problems in this line"));
+  }
+
   private static final Logger LOG = LoggerFactory.getLogger(TestSparkClient.class);
 
   private JobHandle.Listener newListener() {