diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index ee42f4c..662509b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -365,17 +365,7 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
       assert path != null;
       if (!gWorkMap.containsKey(path)
           || HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
-        Path localPath;
-        if (conf.getBoolean("mapreduce.task.uberized", false) && name.equals(REDUCE_PLAN_NAME)) {
-          localPath = new Path(name);
-        } else if (ShimLoader.getHadoopShims().isLocalMode(conf)) {
-          localPath = path;
-        } else {
-          LOG.info("***************non-local mode***************");
-          localPath = new Path(name);
-        }
-        localPath = path;
-        LOG.info("local path = " + localPath);
+        LOG.info("path = " + path);
         if (HiveConf.getBoolVar(conf, ConfVars.HIVE_RPC_QUERY_PLAN)) {
           LOG.debug("Loading plan from string: "+path.toUri().getPath());
           String planString = conf.get(path.toUri().getPath());
@@ -387,8 +377,8 @@ private static BaseWork getBaseWork(Configuration conf, String name) {
           in = new ByteArrayInputStream(planBytes);
           in = new InflaterInputStream(in);
         } else {
-          LOG.info("Open file to read in plan: " + localPath);
-          in = localPath.getFileSystem(conf).open(localPath);
+          LOG.info("Open file to read in plan: " + path);
+          in = path.getFileSystem(conf).open(path);
         }

         if(MAP_PLAN_NAME.equals(name)){
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
index b9d017e..edc90ab 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
@@ -275,8 +275,8 @@ public int hashCode() {
   /**
    * Create Hive splits based on CombineFileSplit.
    */
-  private InputSplit[] getCombineSplits(JobConf job, int numSplits, Map pathToPartitionInfo)
-      throws IOException {
+  private InputSplit[] getCombineSplits(JobConf job, int numSplits,
+      Map pathToPartitionInfo) throws IOException {
     PerfLogger perfLogger = PerfLogger.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
     init(job);
@@ -512,7 +512,8 @@ public int hashCode() {
     if (combinablePaths.size() > 0) {
       FileInputFormat.setInputPaths(job, combinablePaths.toArray
           (new Path[combinablePaths.size()]));
-      Map pathToPartitionInfo = Utilities.getMapWork(job).getPathToPartitionInfo();
+      Map pathToPartitionInfo = this.pathToPartitionInfo != null ?
+          this.pathToPartitionInfo : Utilities.getMapWork(job).getPathToPartitionInfo();
       InputSplit[] splits = getCombineSplits(job, numSplits, pathToPartitionInfo);
       for (InputSplit split : splits) {
         result.add(split);
@@ -658,7 +659,7 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,

     return ShimLoader.getHadoopShims().getCombineFileInputFormat()
         .getRecordReader(job,
-        ((CombineHiveInputSplit) split).getInputSplitShim(), reporter,
+        (CombineFileSplit) split, reporter,
         CombineHiveRecordReader.class);
   }

diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
index 5ac9f85..ede3b6e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
@@ -44,8 +44,9 @@ public CombineHiveRecordReader(InputSplit split, Configuration conf,
       Reporter reporter, Integer partition) throws IOException {
     super((JobConf)conf);
-    CombineHiveInputSplit hsplit = new CombineHiveInputSplit(jobConf,
-        (CombineFileSplit) split);
+    CombineHiveInputSplit hsplit = split instanceof CombineHiveInputSplit ?
+        (CombineHiveInputSplit) split :
+        new CombineHiveInputSplit(jobConf, (CombineFileSplit) split);
     String inputFormatClassName = hsplit.inputFormatClassName();
     Class inputFormatClass = null;
     try {
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index df54cf4..9d5730d 100755
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -258,15 +258,17 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,
   }

   protected void init(JobConf job) {
-    if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
-      mrwork = (MapWork) Utilities.getMergeWork(job);
-      if (mrwork == null) {
+    if (mrwork == null || pathToPartitionInfo == null) {
+      if (HiveConf.getVar(job, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
+        mrwork = (MapWork) Utilities.getMergeWork(job);
+        if (mrwork == null) {
+          mrwork = Utilities.getMapWork(job);
+        }
+      } else {
         mrwork = Utilities.getMapWork(job);
       }
-    } else {
-      mrwork = Utilities.getMapWork(job);
+      pathToPartitionInfo = mrwork.getPathToPartitionInfo();
     }
-    pathToPartitionInfo = mrwork.getPathToPartitionInfo();
   }

   /*
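
Illustrative note (not part of the patch): the reworked HiveInputFormat.init(JobConf) above follows a guarded lazy-initialization pattern -- mrwork and pathToPartitionInfo are recomputed only while they are still unset and are then cached, so the serialized plan is not re-read on every call. A minimal standalone sketch of that pattern follows; the class and member names are hypothetical stand-ins, not Hive APIs.

import java.util.HashMap;
import java.util.Map;

// Hypothetical stand-in showing the guarded lazy-init pattern:
// the expensive lookup runs only while the cached field is still null,
// and every later call reuses the cached result.
public class LazyPlanCache {
  private Map<String, String> pathToPartitionInfo;  // cached across calls

  public Map<String, String> getPathToPartitionInfo() {
    if (pathToPartitionInfo == null) {              // only the first call pays the cost
      pathToPartitionInfo = loadFromSerializedPlan();
    }
    return pathToPartitionInfo;                     // subsequent calls hit the cache
  }

  // Stand-in for deserializing the plan and reading its path-to-partition map.
  private Map<String, String> loadFromSerializedPlan() {
    return new HashMap<String, String>();
  }
}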