diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 8f51f3c..d7418db 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -165,7 +165,7 @@ public int executeTask() { } return retval; } catch (IOException e) { - throw new RuntimeException(e.getMessage()); + throw new RuntimeException("Unexpected error: " + e.getMessage(), e); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java index 61cc874..5c4459b 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java @@ -71,8 +71,20 @@ public class HiveInputFormat implements InputFormat, JobConfigurable { - public static final String CLASS_NAME = HiveInputFormat.class.getName(); - public static final Log LOG = LogFactory.getLog(CLASS_NAME); + private static final String CLASS_NAME = HiveInputFormat.class.getName(); + private static final Log LOG = LogFactory.getLog(CLASS_NAME); + + /** + * A cache of InputFormat instances. + */ + private static Map> inputFormats + = new ConcurrentHashMap>(); + + private JobConf job; + + // both fields are accessed by subclasses + protected Map pathToPartitionInfo; + protected MapWork mrwork; /** * HiveInputSplit encapsulates an InputSplit with its corresponding @@ -178,18 +190,10 @@ public void setConf(Configuration conf) { } } - JobConf job; - public void configure(JobConf job) { this.job = job; } - /** - * A cache of InputFormat instances. 
- */ - protected static Map> inputFormats - = new ConcurrentHashMap>(); - public static InputFormat getInputFormatFromCache( Class inputFormatClass, JobConf job) throws IOException { @@ -248,9 +252,6 @@ public RecordReader getRecordReader(InputSplit split, JobConf job, return rr; } - protected Map pathToPartitionInfo; - MapWork mrwork = null; - protected void init(JobConf job) { mrwork = Utilities.getMapWork(job); pathToPartitionInfo = mrwork.getPathToPartitionInfo(); @@ -281,7 +282,6 @@ private void addSplitsForGroup(List dirs, TableScanOperator tableScan, Job headerCount = Utilities.getHeaderCount(table); footerCount = Utilities.getFooterCount(table, conf); if (headerCount != 0 || footerCount != 0) { - // Input file has header or footer, cannot be splitted. conf.setLong( ShimLoader.getHadoopShims().getHadoopConfNames().get("MAPREDMINSPLITSIZE"),