Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java	(revision 10347)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java	(working copy)
@@ -105,6 +105,11 @@
 
   public void initIOContext(FileSplit split, JobConf job, Class inputFormatClass)
       throws IOException {
+    this.initIOContext(split, job, inputFormatClass, null);
+  }
+
+  public void initIOContext(FileSplit split, JobConf job,
+      Class inputFormatClass, RecordReader recordReader) throws IOException {
     boolean blockPointer = false;
     long blockStart = -1;
     FileSplit fileSplit = (FileSplit) split;
@@ -116,9 +121,12 @@
       in.sync(fileSplit.getStart());
       blockStart = in.getPosition();
       in.close();
+    } else if (recordReader instanceof RCFileRecordReader) {
+      blockPointer = true;
+      blockStart = ((RCFileRecordReader) recordReader).getStart();
     } else if (inputFormatClass.getName().contains("RCFile")) {
+      blockPointer = true;
       RCFile.Reader in = new RCFile.Reader(fs, path, job);
-      blockPointer = true;
       in.sync(fileSplit.getStart());
       blockStart = in.getPosition();
       in.close();
Index: ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java	(revision 10347)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/RCFileRecordReader.java	(working copy)
@@ -110,7 +110,7 @@
 
   /**
    * Return the progress within the input split.
-   * 
+   *
   * @return 0.0 to 1.0 of the input byte range
    */
   public float getProgress() throws IOException {
@@ -129,6 +129,10 @@
     in.seek(pos);
   }
 
+  public long getStart() {
+    return start;
+  }
+
   public void close() throws IOException {
     in.close();
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java	(revision 10347)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java	(working copy)
@@ -66,9 +66,10 @@
         .getLocations());
 
     this.recordReader = inputFormat.getRecordReader(fsplit, job, reporter);
-    this.initIOContext(fsplit, job, inputFormatClass);
+    this.initIOContext(fsplit, job, inputFormatClass, this.recordReader);
   }
 
+  @Override
   public void doClose() throws IOException {
     recordReader.close();
   }
@@ -89,6 +90,7 @@
     return recordReader.getProgress();
   }
 
+  @Override
   public boolean doNext(K key, V value) throws IOException {
     if (ExecMapper.getDone()) {
       return false;
Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java	(revision 10347)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java	(working copy)
@@ -33,9 +33,7 @@
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -185,7 +183,7 @@
 
   public static InputFormat<WritableComparable, Writable> getInputFormatFromCache(
     Class inputFormatClass, JobConf job) throws IOException {
-    
+
     if (inputFormats == null) {
      inputFormats = new HashMap<Class, InputFormat<WritableComparable, Writable>>();
     }
@@ -229,10 +227,12 @@
     if ((part != null) && (part.getTableDesc() != null)) {
       Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), cloneJobConf);
     }
-    
-    HiveRecordReader rr = new HiveRecordReader(inputFormat.getRecordReader(inputSplit,
-        cloneJobConf, reporter));
-    rr.initIOContext(hsplit, job, inputFormatClass);
+
+    RecordReader innerReader = inputFormat.getRecordReader(inputSplit,
+        cloneJobConf, reporter);
+
+    HiveRecordReader rr = new HiveRecordReader(innerReader);
+    rr.initIOContext(hsplit, job, inputFormatClass, innerReader);
 
     return rr;
   }
@@ -274,7 +274,7 @@
         pushFilters(newjob, tableScan);
       }
     }
-    
+
     FileInputFormat.setInputPaths(newjob, dir);
     newjob.setInputFormat(inputFormat.getClass());
     InputSplit[] iss = inputFormat.getSplits(newjob, numSplits / dirs.length);
@@ -353,7 +353,7 @@
         TableScanDesc.FILTER_EXPR_CONF_STR,
         filterExprSerialized);
   }
-  
+
   protected void pushProjectionsAndFilters(JobConf jobConf, Class inputFormatClass,
       String splitPath, String splitPathWithNoSchema) {
     if (this.mrwork == null) {
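
For context, a minimal sketch of the call pattern this patch establishes (an
illustration, not part of the diff; fsplit, job, reporter and inputFormatClass
are assumed to be in scope as they are in CombineHiveRecordReader). The point
of threading the RecordReader through initIOContext is that an
RCFileRecordReader already knows its sync-adjusted start offset, so callers
holding the reader avoid opening a second RCFile.Reader just to call sync():

    // Obtain the underlying reader first, then hand it to initIOContext so
    // the RCFileRecordReader branch can reuse getStart() directly instead of
    // constructing a throwaway RCFile.Reader.
    RecordReader innerReader = inputFormat.getRecordReader(fsplit, job, reporter);
    HiveRecordReader rr = new HiveRecordReader(innerReader);
    rr.initIOContext(fsplit, job, inputFormatClass, innerReader);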