diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 2500fb6..52e1b06 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -21,7 +21,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -50,7 +49,6 @@
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
-import org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.AcidUtils.Directory;
@@ -59,12 +57,10 @@
 import org.apache.hadoop.hive.ql.io.RecordIdentifier;
 import org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader;
 import org.apache.hadoop.hive.ql.io.orc.OrcFile.WriterVersion;
-import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.Context;
 import org.apache.hadoop.hive.ql.io.sarg.ConvertAstToSearchArg;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue;
-import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -1060,7 +1056,7 @@ private long computeProjectionSize(final Reader orcReader, final boolean[] inclu
     } catch (Exception e) {
       cancelFutures(pathFutures);
       cancelFutures(splitFutures);
-      throw new RuntimeException("serious problem", e);
+      throw new RuntimeException("ORC split generation failed with exception: " + e.getMessage(), e);
     }
 
     if (context.cacheStripeDetails) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 8ba4d2e..f451fce 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -1311,6 +1311,25 @@ public void testEmptyFile() throws Exception {
     assertEquals(null, serde.getSerDeStats());
   }
 
+  @Test(expected = RuntimeException.class)
+  public void testSplitGenFailure() throws IOException {
+    Properties properties = new Properties();
+    HiveOutputFormat<?, ?> outFormat = new OrcOutputFormat();
+    org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer =
+        outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true,
+            properties, Reporter.NULL);
+    writer.close(true);
+    InputFormat<?, ?> in = new OrcInputFormat();
+    fs.setPermission(testFilePath, FsPermission.createImmutable((short) 0333));
+    FileInputFormat.setInputPaths(conf, testFilePath.toString());
+    try {
+      in.getSplits(conf, 1);
+    } catch (RuntimeException e) {
+      assertEquals(true, e.getMessage().contains("Permission denied"));
+      throw e;
+    }
+  }
+
   static class StringRow implements Writable {
     String str;
     String str2;
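
Note: the new test deliberately leaves testFilePath with mode 0333, which can surprise later tests that reuse the same fixture path. Below is a minimal sketch (not part of the patch) of a self-cleaning variant. It assumes the same JUnit 4 fixtures as TestInputOutputFormat (conf, fs, testFilePath, MyRow) and that org.junit.Assert.assertTrue is statically imported; the method name, the finally block, and the 0777 restore value are illustrative additions.

  // Hypothetical variant of testSplitGenFailure that restores the fixture's mode.
  @Test(expected = RuntimeException.class)
  public void testSplitGenFailureWithCleanup() throws IOException {
    Properties properties = new Properties();
    HiveOutputFormat<?, ?> outFormat = new OrcOutputFormat();
    org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer =
        outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true,
            properties, Reporter.NULL);
    writer.close(true);
    InputFormat<?, ?> in = new OrcInputFormat();
    // Write/execute-only: split generation cannot read the file and must fail.
    fs.setPermission(testFilePath, FsPermission.createImmutable((short) 0333));
    FileInputFormat.setInputPaths(conf, testFilePath.toString());
    try {
      in.getSplits(conf, 1);
    } catch (RuntimeException e) {
      // The new wrapper message should carry the underlying cause's text.
      assertTrue(e.getMessage().contains("Permission denied"));
      throw e;
    } finally {
      // Undo the restrictive mode so later tests can reuse testFilePath.
      fs.setPermission(testFilePath, FsPermission.createImmutable((short) 0777));
    }
  }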