diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
index b9d017e..8759661 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
@@ -275,7 +275,8 @@ public int hashCode() {
   /**
    * Create Hive splits based on CombineFileSplit.
    */
-  private InputSplit[] getCombineSplits(JobConf job, int numSplits, Map pathToPartitionInfo)
+  private InputSplit[] getCombineSplits(JobConf job, int numSplits,
+      Map pathToPartitionInfo)
     throws IOException {
     PerfLogger perfLogger = PerfLogger.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
@@ -512,7 +513,8 @@ public int hashCode() {
     if (combinablePaths.size() > 0) {
       FileInputFormat.setInputPaths(job, combinablePaths.toArray
           (new Path[combinablePaths.size()]));
-      Map pathToPartitionInfo = Utilities.getMapWork(job).getPathToPartitionInfo();
+      Map pathToPartitionInfo = this.pathToPartitionInfo != null ?
+          this.pathToPartitionInfo : Utilities.getMapWork(job).getPathToPartitionInfo();
       InputSplit[] splits = getCombineSplits(job, numSplits, pathToPartitionInfo);
       for (InputSplit split : splits) {
         result.add(split);
@@ -658,7 +660,7 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,
 
     return ShimLoader.getHadoopShims().getCombineFileInputFormat()
         .getRecordReader(job,
-            ((CombineHiveInputSplit) split).getInputSplitShim(), reporter,
+            (CombineFileSplit) split, reporter,
             CombineHiveRecordReader.class);
   }
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
index 5ac9f85..ede3b6e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
@@ -44,8 +44,9 @@ public CombineHiveRecordReader(InputSplit split, Configuration conf,
       Reporter reporter, Integer partition) throws IOException {
     super((JobConf)conf);
 
-    CombineHiveInputSplit hsplit = new CombineHiveInputSplit(jobConf,
-        (CombineFileSplit) split);
+    CombineHiveInputSplit hsplit = split instanceof CombineHiveInputSplit ?
+        (CombineHiveInputSplit) split :
+        new CombineHiveInputSplit(jobConf, (CombineFileSplit) split);
     String inputFormatClassName = hsplit.inputFormatClassName();
     Class inputFormatClass = null;
     try {
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index df54cf4..9d5730d 100755
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -258,15 +258,17 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,
   }
 
   protected void init(JobConf job) {
-    if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
-      mrwork = (MapWork) Utilities.getMergeWork(job);
-      if (mrwork == null) {
+    if (mrwork == null || pathToPartitionInfo == null) {
+      if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
+        mrwork = (MapWork) Utilities.getMergeWork(job);
+        if (mrwork == null) {
+          mrwork = Utilities.getMapWork(job);
+        }
+      } else {
         mrwork = Utilities.getMapWork(job);
       }
-    } else {
-      mrwork = Utilities.getMapWork(job);
+      pathToPartitionInfo = mrwork.getPathToPartitionInfo();
     }
-    pathToPartitionInfo = mrwork.getPathToPartitionInfo();
   }
 
   /*