Index: ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java (revision 1055171)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexInputFormat.java (working copy)
@@ -44,7 +44,7 @@
     for (Path dir : dirs) {
       PartitionDesc part = HiveFileFormatUtils
           .getPartitionDescFromPathRecursively(pathToPartitionInfo, dir,
-              IOPrepareCache.get().allocatePartitionDescMap());
+              IOPrepareCache.get().allocatePartitionDescMap(), true);
       // create a new InputFormat instance if this is the first time to see this
       // class
       Class inputFormatClass = part.getInputFileFormatClass();
Index: ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexResult.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexResult.java (revision 1055171)
+++ ql/src/java/org/apache/hadoop/hive/ql/index/compact/HiveCompactIndexResult.java (working copy)
@@ -74,6 +74,7 @@
   JobConf job = null;
   BytesRefWritable[] bytesRef = new BytesRefWritable[2];
+  boolean ignoreHdfsLoc = false;

   public HiveCompactIndexResult(String indexFile, JobConf conf) throws IOException,
       HiveException {
@@ -81,6 +82,7 @@
     bytesRef[0] = new BytesRefWritable();
     bytesRef[1] = new BytesRefWritable();
+    ignoreHdfsLoc = job.getBoolean("hive.index.compact.file.ignore.hdfs", false);

     if (indexFile != null) {
       Path indexFilePath = new Path(indexFile);
@@ -128,6 +130,11 @@
               + line.toString());
         }
         String bucketFileName = new String(bytes, 0, firstEnd);
+
+        if (ignoreHdfsLoc) {
+          Path tmpPath = new Path(bucketFileName);
+          bucketFileName = tmpPath.toUri().getPath();
+        }
         IBucket bucket = buckets.get(bucketFileName);
         if (bucket == null) {
           bucket = new IBucket(bucketFileName);
Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (revision 1055171)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (working copy)
@@ -249,11 +249,19 @@
       Map<String, PartitionDesc> pathToPartitionInfo, Path dir,
       Map<Map<String, PartitionDesc>, Map<String, PartitionDesc>> cacheMap)
       throws IOException {
+    return getPartitionDescFromPathRecursively(pathToPartitionInfo, dir,
+        cacheMap, false);
+  }
+
+  public static PartitionDesc getPartitionDescFromPathRecursively(
+      Map<String, PartitionDesc> pathToPartitionInfo, Path dir,
+      Map<Map<String, PartitionDesc>, Map<String, PartitionDesc>> cacheMap,
+      boolean ignoreSchema) throws IOException {
     PartitionDesc part = doGetPartitionDescFromPath(pathToPartitionInfo, dir);
     if (part == null
-        && (dir.toUri().getScheme() == null || dir.toUri().getScheme().trim()
-        .equals(""))) {
+        && (ignoreSchema || (dir.toUri().getScheme() == null || dir.toUri().getScheme().trim()
+        .equals("")))) {

       Map<String, PartitionDesc> newPathToPartitionInfo = null;
       if (cacheMap != null) {
@@ -275,7 +283,7 @@
       return part;
     } else {
       throw new IOException("cannot find dir = " + dir.toString()
-          + " in partToPartitionInfo: " + pathToPartitionInfo.keySet());
+          + " in pathToPartitionInfo: " + pathToPartitionInfo.keySet());
    }
  }
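
Note (not part of the patch): a minimal sketch of the path normalization that the new ignoreHdfsLoc branch in HiveCompactIndexResult performs. The class name and the example locations below are hypothetical illustrations; the only behavior assumed from the patch is that hive.index.compact.file.ignore.hdfs is set to true in the job conf. Path.toUri().getPath() drops the URI scheme and authority, so an index entry recorded against a fully qualified HDFS location still matches the same bucket file when it is addressed by a bare, scheme-less path.

// IgnoreHdfsLocExample.java -- hypothetical standalone demo, assuming
// hadoop-common on the classpath for org.apache.hadoop.fs.Path.
import org.apache.hadoop.fs.Path;

public class IgnoreHdfsLocExample {
  public static void main(String[] args) {
    // Example bucket file name as it might appear in a compact index entry,
    // fully qualified with the writing cluster's namenode (hypothetical host).
    String qualified = "hdfs://namenode:8020/user/hive/warehouse/tbl/bucket_0";

    // The same file as a bare path, with no scheme or authority.
    String bare = "/user/hive/warehouse/tbl/bucket_0";

    // Same normalization as the ignoreHdfsLoc branch in the patch:
    // Path -> URI -> path component, stripping "hdfs://namenode:8020".
    String normalized = new Path(qualified).toUri().getPath();

    System.out.println(normalized);               // /user/hive/warehouse/tbl/bucket_0
    System.out.println(normalized.equals(bare));  // true
  }
}

With the flag off (the default), the two strings above would not compare equal, the buckets map lookup in HiveCompactIndexResult would miss, and index entries written on one namenode could not be matched against splits from another location; normalizing both sides to the bare path is what makes the lookup location-independent.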