diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 9ed2c61..6b7c463 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -31,7 +31,6 @@
 import java.io.DataInput;
 import java.io.EOFException;
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
index fa6b548..cfcfe17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
@@ -56,14 +56,6 @@ public SelectDesc(
     this.outputColumnNames = outputColumnNames;
   }
 
-  public SelectDesc(
-    final List<ExprNodeDesc> colList,
-    final boolean selectStar, final boolean selStarNoCompute) {
-    this.colList = colList;
-    this.selectStar = selectStar;
-    this.selStarNoCompute = selStarNoCompute;
-  }
-
   @Override
   public Object clone() {
     SelectDesc ret = new SelectDesc();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index 41862e6..730823f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -38,7 +38,7 @@
 import org.apache.hadoop.hive.ql.exec.mr.ExecDriver;
 import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
-import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -47,7 +47,6 @@
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExtractDesc;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
@@ -94,7 +93,7 @@
     tmppath = new Path(tmpdir);
 
     fs = FileSystem.get(conf);
-    if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
+    if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDirectory()) {
       throw new RuntimeException(tmpdir + " exists but is not a directory");
     }
 
@@ -137,7 +136,7 @@
     for (String src : srctables) {
       db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
       db.createTable(src, cols, null, TextInputFormat.class,
-          IgnoreKeyTextOutputFormat.class);
+          HiveIgnoreKeyTextOutputFormat.class);
       db.loadTable(hadoopDataFile[i], src, false, false, true, false, false);
       i++;
     }
@@ -161,20 +160,19 @@ public static void addMapWork(MapredWork mr, Table tbl, String alias,
     Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan1.out"), Utilities.defaultTd, false));
 
-    Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
-        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
+    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
+    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1)));
+    List<String> colNames = new ArrayList<String>();
+    colNames.add(HiveConf.getColumnInternalName(2));
+    Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, colNames), op3);
 
     rWork.setReducer(op2);
   }
@@ -292,8 +293,10 @@ private void populateMapRedPlan2(Table src) throws Exception {
     Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
 
-    Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
-        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
+    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
+    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
+    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1)));
+    Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3);
 
     rWork.setReducer(op2);
   }
@@ -376,10 +379,10 @@ private void populateMapRedPlan4(Table src) throws SemanticException {
     // reduce side work
     Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan4.out"), Utilities.defaultTd, false));
-
-    Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
-        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
-
+    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
+    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
+    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1)));
+    Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3);
 
     rWork.setReducer(op2);
   }
@@ -416,9 +419,10 @@ private void populateMapRedPlan5(Table src) throws SemanticException {
     Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator
         + "mapredplan5.out"), Utilities.defaultTd, false));
 
-    Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
-        getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
-
+    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
+    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
+    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1)));
+    Operator<SelectDesc> op2 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op3);
 
     rWork.setReducer(op2);
   }
@@ -459,8 +463,10 @@ private void populateMapRedPlan6(Table src) throws Exception {
     Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
 
-    Operator<ExtractDesc> op5 = OperatorFactory.get(new ExtractDesc(
-        getStringColumn(Utilities.ReduceField.VALUE.toString())), op2);
+    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
+    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
+    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString()+"."+outputColumns.get(1)));
+    Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(cols, outputColumns), op2);
 
     rWork.setReducer(op5);
   }
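
Note on the ExtractDesc -> SelectDesc rewrite above: ExtractOperator seeded each test reducer with a single expression that forwarded the shuffle VALUE struct downstream. With ExtractDesc removed, each call site instead builds a SelectOperator that projects the reduce-side columns explicitly (KEY.reducesinkkey0 and the internal VALUE column) and pairs them with output column names through the surviving SelectDesc(colList, outputColumnNames) constructor. A minimal sketch of the pattern follows; it is illustrative only, not part of the patch, and assumes the usual TestExecDriver fixture (getStringColumn's shape, and the outputColumns list and sinkOp operator are stand-ins from the test context):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.Operator;
    import org.apache.hadoop.hive.ql.exec.OperatorFactory;
    import org.apache.hadoop.hive.ql.exec.Utilities;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.SelectDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    // Assumed shape of the helper the diff calls: wrap a column name in a
    // string-typed column expression (no table alias, not a partition column).
    public static ExprNodeDesc getStringColumn(String columnName) {
      return new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName, "", false);
    }

    // Replacement pattern used at each former ExtractDesc site: project the
    // shuffle key and value sub-columns, then hand SelectDesc the matching
    // output names and chain it in front of the downstream operator.
    List<ExprNodeDesc> cols = new ArrayList<ExprNodeDesc>();
    cols.add(getStringColumn(Utilities.ReduceField.KEY + ".reducesinkkey" + 0));
    cols.add(getStringColumn(Utilities.ReduceField.VALUE.toString() + "." + outputColumns.get(1)));
    Operator<SelectDesc> reducer = OperatorFactory.get(new SelectDesc(cols, outputColumns), sinkOp);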