diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index 7987c4e..16ced13 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -511,6 +511,13 @@ private void addSplitsForGroup(List dirs, TableScanOperator tableScan, Job
     }

     if (tableScan != null) {
+      StringBuilder readColumnsBuffer = new StringBuilder();
+      StringBuilder readColumnNamesBuffer = new StringBuilder();
+
+      // push down projections.
+      ColumnProjectionUtils.appendReadColumns(readColumnsBuffer, readColumnNamesBuffer, tableScan.getNeededColumnIDs(),
+          tableScan.getNeededColumns());
+      pushProjection(conf, readColumnsBuffer, readColumnNamesBuffer);
       pushFilters(conf, tableScan, this.mrwork);
     }

@@ -649,12 +656,6 @@ private static void processForWriteIds(Path dir, JobConf conf,
     TableDesc currentTable = null;
     TableScanOperator currentTableScan = null;

-    boolean pushDownProjection = false;
-    //Buffers to hold filter pushdown information
-    StringBuilder readColumnsBuffer = new StringBuilder(newjob.
-        get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR, ""));;
-    StringBuilder readColumnNamesBuffer = new StringBuilder(newjob.
-        get(ColumnProjectionUtils.READ_COLUMN_NAMES_CONF_STR, ""));
     // for each dir, get the InputFormat, and do getSplits.
     for (Path dir : dirs) {
       PartitionDesc part = getPartitionDescFromPath(pathToPartitionInfo, dir);
@@ -669,15 +670,6 @@ private static void processForWriteIds(Path dir, JobConf conf,
           Operator op = mrwork.getAliasToWork().get(aliases.get(0));
           if ((op != null) && (op instanceof TableScanOperator)) {
             tableScan = (TableScanOperator) op;
-            //Reset buffers to store filter push down columns
-            readColumnsBuffer.setLength(0);
-            readColumnNamesBuffer.setLength(0);
-            // push down projections.
-            ColumnProjectionUtils.appendReadColumns(readColumnsBuffer, readColumnNamesBuffer,
-                tableScan.getNeededColumnIDs(), tableScan.getNeededColumns());
-            pushDownProjection = true;
-            // push down filters
-            pushFilters(newjob, tableScan, this.mrwork);
           }
         } else {
           if (LOG.isDebugEnabled()) {
@@ -697,12 +689,6 @@ private static void processForWriteIds(Path dir, JobConf conf,
        if (LOG.isInfoEnabled()) {
          LOG.info("Generating splits as currentDirs is not empty. currentDirs: {}", currentDirs);
        }
-
-        // set columns to read in conf
-        if (pushDownProjection) {
-          pushProjection(newjob, readColumnsBuffer, readColumnNamesBuffer);
-        }
-
        addSplitsForGroup(currentDirs, currentTableScan, newjob,
            getInputFormatFromCache(currentInputFormatClass, job),
            currentInputFormatClass, currentDirs.size()*(numSplits / dirs.length),
@@ -716,11 +702,6 @@ private static void processForWriteIds(Path dir, JobConf conf,
       currentInputFormatClass = inputFormatClass;
     }

-    // set columns to read in conf
-    if (pushDownProjection) {
-      pushProjection(newjob, readColumnsBuffer, readColumnNamesBuffer);
-    }
-
     if (dirs.length != 0) {
       if (LOG.isInfoEnabled()) {
         LOG.info("Generating splits for dirs: {}", dirs);
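
For context (not part of the patch itself): the net effect of this diff is that projection pushdown is now computed per table scan inside addSplitsForGroup, with fresh StringBuilders, instead of shared buffers plus a pushDownProjection flag threaded through the caller's directory loop. Below is a minimal, self-contained sketch of that per-scan pattern in plain Java; FakeScan, the Map-backed conf, and the simplified pushProjection are hypothetical stand-ins for TableScanOperator, JobConf, and HiveInputFormat.pushProjection, not real Hive code.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ProjectionPushdownSketch {

      // Hypothetical stand-in for TableScanOperator: each scan knows
      // which column ids it needs (cf. getNeededColumnIDs()).
      static class FakeScan {
        final String table;
        final List<Integer> neededColumnIds;

        FakeScan(String table, List<Integer> neededColumnIds) {
          this.table = table;
          this.neededColumnIds = neededColumnIds;
        }
      }

      // Stand-in for pushProjection: publish the projected column ids
      // into the per-scan configuration.
      static void pushProjection(Map<String, String> conf, StringBuilder readColumns) {
        conf.put("hive.io.file.readcolumn.ids", readColumns.toString());
      }

      public static void main(String[] args) {
        List<FakeScan> scans = Arrays.asList(
            new FakeScan("t1", Arrays.asList(0, 2)),
            new FakeScan("t2", Arrays.asList(1)));

        for (FakeScan scan : scans) {
          // Fresh buffers per scan, as in the patched addSplitsForGroup:
          // no shared buffers to reset and no pushDownProjection flag, so
          // one table's projection cannot leak into another's splits.
          Map<String, String> conf = new HashMap<String, String>();
          StringBuilder readColumnsBuffer = new StringBuilder();
          for (int id : scan.neededColumnIds) {
            if (readColumnsBuffer.length() > 0) {
              readColumnsBuffer.append(',');
            }
            readColumnsBuffer.append(id);
          }
          pushProjection(conf, readColumnsBuffer);
          System.out.println(scan.table + " -> " + conf);
        }
      }
    }

Running the sketch prints one independent projection per scan (t1 -> {hive.io.file.readcolumn.ids=0,2}, then t2 -> {hive.io.file.readcolumn.ids=1}), which is the isolation property the patch achieves by scoping the buffers to addSplitsForGroup.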