Index: contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (revision 901960)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (working copy)
@@ -22,7 +22,7 @@
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.description;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -30,7 +30,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-@description(name = "explode2", value = "_FUNC_(a) - like explode, but outputs two identical columns (for "
+@Description(name = "explode2", value = "_FUNC_(a) - like explode, but outputs two identical columns (for "
+ "testing purposes)")
public class GenericUDTFExplode2 extends GenericUDTF {
Index: contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java
===================================================================
--- contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java (revision 901960)
+++ contrib/src/java/org/apache/hadoop/hive/contrib/genericudf/example/GenericUDFDBOutput.java (working copy)
@@ -25,7 +25,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.exec.description;
+import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
@@ -51,7 +51,7 @@
* Use hive's ADD JAR feature to add your JDBC Driver to the distributed cache,
* otherwise GenericUDFDBoutput will fail.
*/
-@description(name = "dboutput", value = "_FUNC_(jdbcstring,username,password,preparedstatement,[arguments]) - sends data to a jdbc driver", extended = "argument 0 is the JDBC connection string\n"
+@Description(name = "dboutput", value = "_FUNC_(jdbcstring,username,password,preparedstatement,[arguments]) - sends data to a jdbc driver", extended = "argument 0 is the JDBC connection string\n"
+ "argument 1 is the user name\n"
+ "argument 2 is the password\n"
+ "argument 3 is an SQL query to be used in the PreparedStatement\n"
Index: ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java (revision 901960)
+++ ql/src/test/org/apache/hadoop/hive/scripts/extracturl.java (working copy)
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.scripts;
-
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class extracturl {
-
- protected static final Pattern pattern = Pattern.compile(
- "link",
- Pattern.CASE_INSENSITIVE);
- static InputStreamReader converter = new InputStreamReader(System.in);
- static BufferedReader in = new BufferedReader(converter);
-
- public static void main(String[] args) {
- String input;
- try {
- while ((input = in.readLine()) != null) {
- Matcher m = pattern.matcher(input);
-
- while (m.find()) {
- String url = input.substring(m.start(1), m.end(1));
- System.out.println(url + "\t" + "1");
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- System.exit(1);
- }
- }
-}
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (revision 901960)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (working copy)
@@ -37,18 +37,18 @@
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.extractDesc;
-import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
-import org.apache.hadoop.hive.ql.plan.filterDesc;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
-import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;
-import org.apache.hadoop.hive.ql.plan.scriptDesc;
-import org.apache.hadoop.hive.ql.plan.selectDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExtractDesc;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.ScriptDesc;
+import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.TextInputFormat;
@@ -127,7 +127,7 @@
}
}
- mapredWork mr;
+ MapredWork mr;
protected void setUp() {
mr = PlanUtils.getMapRedWork();
@@ -157,39 +157,39 @@
}
}
- private filterDesc getTestFilterDesc(String column) {
- ArrayList<exprNodeDesc> children1 = new ArrayList<exprNodeDesc>();
- children1.add(new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
+ private FilterDesc getTestFilterDesc(String column) {
+ ArrayList<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
+ children1.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
column, "", false));
- exprNodeDesc lhs = new exprNodeGenericFuncDesc(
+ ExprNodeDesc lhs = new ExprNodeGenericFuncDesc(
TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);
- ArrayList<exprNodeDesc> children2 = new ArrayList<exprNodeDesc>();
- children2.add(new exprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
+ ArrayList<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
+ children2.add(new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
.valueOf(100)));
- exprNodeDesc rhs = new exprNodeGenericFuncDesc(
+ ExprNodeDesc rhs = new ExprNodeGenericFuncDesc(
TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);
- ArrayList<exprNodeDesc> children3 = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>();
children3.add(lhs);
children3.add(rhs);
- exprNodeDesc desc = new exprNodeGenericFuncDesc(
+ ExprNodeDesc desc = new ExprNodeGenericFuncDesc(
TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo("<")
.getGenericUDF(), children3);
- return new filterDesc(desc, false);
+ return new FilterDesc(desc, false);
}
@SuppressWarnings("unchecked")
private void populateMapPlan1(Table src) {
mr.setNumReduceTasks(Integer.valueOf(0));
- Operator<fileSinkDesc> op2 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op2 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapplan1.out", Utilities.defaultTd, true));
- Operator op1 = OperatorFactory.get(getTestFilterDesc("key"),
+ Operator op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
Utilities.addMapWork(mr, src, "a", op1);
@@ -199,15 +199,15 @@
private void populateMapPlan2(Table src) {
mr.setNumReduceTasks(Integer.valueOf(0));
- Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapplan2.out", Utilities.defaultTd, false));
- Operator op2 = OperatorFactory.get(new scriptDesc("/bin/cat",
+ Operator op2 = OperatorFactory.get(new ScriptDesc("/bin/cat",
PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
+ Utilities.tabCode, "key,value"), TextRecordReader.class), op3);
- Operator op1 = OperatorFactory.get(getTestFilterDesc("key"),
+ Operator op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
Utilities.addMapWork(mr, src, "a", op1);
@@ -222,7 +222,7 @@
outputColumns.add("_col" + i);
}
// map-side work
- Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
Utilities.makeList(getStringColumn("value")), outputColumns, true,
-1, 1, -1));
@@ -232,10 +232,10 @@
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
// reduce side work
- Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapredplan1.out", Utilities.defaultTd, false));
- Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+ Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
@@ -249,7 +249,7 @@
outputColumns.add("_col" + i);
}
// map-side work
- Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
@@ -260,12 +260,12 @@
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
// reduce side work
- Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapredplan2.out", Utilities.defaultTd, false));
- Operator<filterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
+ Operator<FilterDesc> op3 = OperatorFactory.get(getTestFilterDesc("0"), op4);
- Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+ Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
@@ -283,7 +283,7 @@
outputColumns.add("_col" + i);
}
// map-side work
- Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
Utilities.makeList(getStringColumn("value")), outputColumns, true,
Byte.valueOf((byte) 0), 1, -1));
@@ -292,7 +292,7 @@
mr.setKeyDesc(op1.getConf().getKeySerializeInfo());
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
- Operator<reduceSinkDesc> op2 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op2 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("key")),
Utilities.makeList(getStringColumn("key")), outputColumns, true,
Byte.valueOf((byte) 1), Integer.MAX_VALUE, -1));
@@ -301,13 +301,13 @@
mr.getTagToValueDesc().add(op2.getConf().getValueSerializeInfo());
// reduce side work
- Operator<fileSinkDesc> op4 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op4 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapredplan3.out", Utilities.defaultTd, false));
- Operator<selectDesc> op5 = OperatorFactory.get(new selectDesc(Utilities
+ Operator<SelectDesc> op5 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn(Utilities.ReduceField.ALIAS.toString()),
- new exprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
- new exprNodeColumnDesc(TypeInfoFactory
+ new ExprNodeFieldDesc(TypeInfoFactory.stringTypeInfo,
+ new ExprNodeColumnDesc(TypeInfoFactory
.getListTypeInfo(TypeInfoFactory.stringTypeInfo),
Utilities.ReduceField.VALUE.toString(), "", false), "0",
false)), outputColumns), op4);
@@ -324,17 +324,17 @@
for (int i = 0; i < 2; i++) {
outputColumns.add("_col" + i);
}
- Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
Utilities.makeList(getStringColumn("tkey"),
getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
- Operator op0 = OperatorFactory.get(new scriptDesc("/bin/cat",
+ Operator op0 = OperatorFactory.get(new ScriptDesc("/bin/cat",
PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
+ Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class), op1);
- Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(Utilities
+ Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns), op0);
@@ -343,17 +343,17 @@
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
// reduce side work
- Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapredplan4.out", Utilities.defaultTd, false));
- Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+ Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
}
- public static exprNodeColumnDesc getStringColumn(String columnName) {
- return new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName,
+ public static ExprNodeColumnDesc getStringColumn(String columnName) {
+ return new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, columnName,
"", false);
}
@@ -366,12 +366,12 @@
for (int i = 0; i < 2; i++) {
outputColumns.add("_col" + i);
}
- Operator<reduceSinkDesc> op0 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op0 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("0")), Utilities
.makeList(getStringColumn("0"), getStringColumn("1")),
outputColumns, false, -1, 1, -1));
- Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(Utilities
+ Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns), op0);
@@ -380,10 +380,10 @@
mr.getTagToValueDesc().add(op0.getConf().getValueSerializeInfo());
// reduce side work
- Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapredplan5.out", Utilities.defaultTd, false));
- Operator<extractDesc> op2 = OperatorFactory.get(new extractDesc(
+ Operator<ExtractDesc> op2 = OperatorFactory.get(new ExtractDesc(
getStringColumn(Utilities.ReduceField.VALUE.toString())), op3);
mr.setReducer(op2);
@@ -398,18 +398,18 @@
for (int i = 0; i < 2; i++) {
outputColumns.add("_col" + i);
}
- Operator<reduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
+ Operator<ReduceSinkDesc> op1 = OperatorFactory.get(PlanUtils
.getReduceSinkDesc(Utilities.makeList(getStringColumn("tkey")),
Utilities.makeList(getStringColumn("tkey"),
getStringColumn("tvalue")), outputColumns, false, -1, 1, -1));
- Operator<scriptDesc> op0 = OperatorFactory.get(new scriptDesc(
+ Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc(
"\'/bin/cat\'", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode,
"tkey,tvalue"), TextRecordWriter.class, PlanUtils
.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
TextRecordReader.class), op1);
- Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(Utilities
+ Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
outputColumns), op0);
@@ -418,12 +418,12 @@
mr.getTagToValueDesc().add(op1.getConf().getValueSerializeInfo());
// reduce side work
- Operator<fileSinkDesc> op3 = OperatorFactory.get(new fileSinkDesc(tmpdir
+ Operator<FileSinkDesc> op3 = OperatorFactory.get(new FileSinkDesc(tmpdir
+ "mapredplan6.out", Utilities.defaultTd, false));
- Operator<filterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
+ Operator<FilterDesc> op2 = OperatorFactory.get(getTestFilterDesc("0"), op3);
- Operator<extractDesc> op5 = OperatorFactory.get(new extractDesc(
+ Operator<ExtractDesc> op5 = OperatorFactory.get(new ExtractDesc(
getStringColumn(Utilities.ReduceField.VALUE.toString())), op2);
mr.setReducer(op5);
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (revision 901960)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (working copy)
@@ -24,10 +24,10 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -94,7 +94,7 @@
public void testExprNodeColumnEvaluator() throws Throwable {
try {
// get a evaluator for a simple field expression
- exprNodeDesc exprDesc = new exprNodeColumnDesc(colaType, "cola", "",
+ ExprNodeDesc exprDesc = new ExprNodeColumnDesc(colaType, "cola", "",
false);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
@@ -112,16 +112,16 @@
}
}
- private static exprNodeDesc getListIndexNode(exprNodeDesc node, int index) {
- return getListIndexNode(node, new exprNodeConstantDesc(index));
+ private static ExprNodeDesc getListIndexNode(ExprNodeDesc node, int index) {
+ return getListIndexNode(node, new ExprNodeConstantDesc(index));
}
- private static exprNodeDesc getListIndexNode(exprNodeDesc node,
- exprNodeDesc index) {
- ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
+ private static ExprNodeDesc getListIndexNode(ExprNodeDesc node,
+ ExprNodeDesc index) {
+ ArrayList<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
children.add(node);
children.add(index);
- return new exprNodeGenericFuncDesc(((ListTypeInfo) node.getTypeInfo())
+ return new ExprNodeGenericFuncDesc(((ListTypeInfo) node.getTypeInfo())
.getListElementTypeInfo(), FunctionRegistry.getGenericUDFForIndex(),
children);
}
@@ -129,13 +129,13 @@
public void testExprNodeFuncEvaluator() throws Throwable {
try {
// get a evaluator for a string concatenation expression
- exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "",
+ ExprNodeDesc col1desc = new ExprNodeColumnDesc(col1Type, "col1", "",
false);
- exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola", "",
+ ExprNodeDesc coladesc = new ExprNodeColumnDesc(colaType, "cola", "",
false);
- exprNodeDesc col11desc = getListIndexNode(col1desc, 1);
- exprNodeDesc cola0desc = getListIndexNode(coladesc, 0);
- exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc col11desc = getListIndexNode(col1desc, 1);
+ ExprNodeDesc cola0desc = getListIndexNode(coladesc, 0);
+ ExprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat", col11desc, cola0desc);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
@@ -154,10 +154,10 @@
public void testExprNodeConversionEvaluator() throws Throwable {
try {
// get a evaluator for a string concatenation expression
- exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1", "",
+ ExprNodeDesc col1desc = new ExprNodeColumnDesc(col1Type, "col1", "",
false);
- exprNodeDesc col11desc = getListIndexNode(col1desc, 1);
- exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc col11desc = getListIndexNode(col1desc, 1);
+ ExprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc(Constants.DOUBLE_TYPE_NAME, col11desc);
ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);
@@ -199,15 +199,15 @@
int basetimes = 100000;
measureSpeed("1 + 2", basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(
- "+", new exprNodeConstantDesc(1), new exprNodeConstantDesc(2))),
+ "+", new ExprNodeConstantDesc(1), new ExprNodeConstantDesc(2))),
r, Integer.valueOf(1 + 2));
measureSpeed("1 + 2 - 3", basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("-",
TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("+", new exprNodeConstantDesc(1),
- new exprNodeConstantDesc(2)),
- new exprNodeConstantDesc(3))), r, Integer.valueOf(1 + 2 - 3));
+ .getFuncExprNodeDesc("+", new ExprNodeConstantDesc(1),
+ new ExprNodeConstantDesc(2)),
+ new ExprNodeConstantDesc(3))), r, Integer.valueOf(1 + 2 - 3));
measureSpeed("1 + 2 - 3 + 4", basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("+",
@@ -215,25 +215,25 @@
.getFuncExprNodeDesc("-",
TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("+",
- new exprNodeConstantDesc(1),
- new exprNodeConstantDesc(2)),
- new exprNodeConstantDesc(3)),
- new exprNodeConstantDesc(4))), r, Integer
+ new ExprNodeConstantDesc(1),
+ new ExprNodeConstantDesc(2)),
+ new ExprNodeConstantDesc(3)),
+ new ExprNodeConstantDesc(4))), r, Integer
.valueOf(1 + 2 - 3 + 4));
measureSpeed("concat(\"1\", \"2\")", basetimes * 100,
ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
- .getFuncExprNodeDesc("concat", new exprNodeConstantDesc("1"),
- new exprNodeConstantDesc("2"))), r, "12");
+ .getFuncExprNodeDesc("concat", new ExprNodeConstantDesc("1"),
+ new ExprNodeConstantDesc("2"))), r, "12");
measureSpeed("concat(concat(\"1\", \"2\"), \"3\")", basetimes * 100,
ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat",
- new exprNodeConstantDesc("1"),
- new exprNodeConstantDesc("2")),
- new exprNodeConstantDesc("3"))), r, "123");
+ new ExprNodeConstantDesc("1"),
+ new ExprNodeConstantDesc("2")),
+ new ExprNodeConstantDesc("3"))), r, "123");
measureSpeed("concat(concat(concat(\"1\", \"2\"), \"3\"), \"4\")",
basetimes * 100, ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
@@ -242,18 +242,18 @@
.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat",
- new exprNodeConstantDesc("1"),
- new exprNodeConstantDesc("2")),
- new exprNodeConstantDesc("3")),
- new exprNodeConstantDesc("4"))), r, "1234");
- exprNodeDesc constant1 = new exprNodeConstantDesc(1);
- exprNodeDesc constant2 = new exprNodeConstantDesc(2);
+ new ExprNodeConstantDesc("1"),
+ new ExprNodeConstantDesc("2")),
+ new ExprNodeConstantDesc("3")),
+ new ExprNodeConstantDesc("4"))), r, "1234");
+ ExprNodeDesc constant1 = new ExprNodeConstantDesc(1);
+ ExprNodeDesc constant2 = new ExprNodeConstantDesc(2);
measureSpeed("concat(col1[1], cola[1])", basetimes * 10,
ExprNodeEvaluatorFactory
.get(TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat", getListIndexNode(
- new exprNodeColumnDesc(col1Type, "col1", "", false),
- constant1), getListIndexNode(new exprNodeColumnDesc(
+ new ExprNodeColumnDesc(col1Type, "col1", "", false),
+ constant1), getListIndexNode(new ExprNodeColumnDesc(
colaType, "cola", "", false), constant1))), r, "1b");
measureSpeed("concat(concat(col1[1], cola[1]), col1[2])", basetimes * 10,
ExprNodeEvaluatorFactory
@@ -261,11 +261,11 @@
.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat", getListIndexNode(
- new exprNodeColumnDesc(col1Type, "col1", "",
+ new ExprNodeColumnDesc(col1Type, "col1", "",
false), constant1), getListIndexNode(
- new exprNodeColumnDesc(colaType, "cola", "",
+ new ExprNodeColumnDesc(colaType, "cola", "",
false), constant1)), getListIndexNode(
- new exprNodeColumnDesc(col1Type, "col1", "", false),
+ new ExprNodeColumnDesc(col1Type, "col1", "", false),
constant2))), r, "1b2");
measureSpeed(
"concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
@@ -276,14 +276,14 @@
.getFuncExprNodeDesc("concat",
TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat",
- getListIndexNode(new exprNodeColumnDesc(
+ getListIndexNode(new ExprNodeColumnDesc(
col1Type, "col1", "", false),
constant1), getListIndexNode(
- new exprNodeColumnDesc(colaType,
+ new ExprNodeColumnDesc(colaType,
"cola", "", false), constant1)),
- getListIndexNode(new exprNodeColumnDesc(col1Type,
+ getListIndexNode(new ExprNodeColumnDesc(col1Type,
"col1", "", false), constant2)),
- getListIndexNode(new exprNodeColumnDesc(colaType, "cola",
+ getListIndexNode(new ExprNodeColumnDesc(colaType, "cola",
"", false), constant2))), r, "1b2c");
} catch (Throwable e) {
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (revision 901960)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (working copy)
@@ -26,12 +26,12 @@
import junit.framework.TestCase;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.filterDesc;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
-import org.apache.hadoop.hive.ql.plan.partitionDesc;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.mapred.JobConf;
@@ -44,15 +44,15 @@
try {
// initialize a complete map reduce configuration
- exprNodeDesc expr1 = new exprNodeColumnDesc(
+ ExprNodeDesc expr1 = new ExprNodeColumnDesc(
TypeInfoFactory.stringTypeInfo, F1, "", false);
- exprNodeDesc expr2 = new exprNodeColumnDesc(
+ ExprNodeDesc expr2 = new ExprNodeColumnDesc(
TypeInfoFactory.stringTypeInfo, F2, "", false);
- exprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("==", expr1, expr2);
- filterDesc filterCtx = new filterDesc(filterExpr, false);
- Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
+ FilterDesc filterCtx = new FilterDesc(filterExpr, false);
+ Operator<FilterDesc> op = OperatorFactory.get(FilterDesc.class);
op.setConf(filterCtx);
ArrayList aliasList = new ArrayList();
@@ -60,15 +60,15 @@
LinkedHashMap<String, ArrayList<String>> pa = new LinkedHashMap<String, ArrayList<String>>();
pa.put("/tmp/testfolder", aliasList);
- tableDesc tblDesc = Utilities.defaultTd;
- partitionDesc partDesc = new partitionDesc(tblDesc, null);
- LinkedHashMap<String, partitionDesc> pt = new LinkedHashMap<String, partitionDesc>();
+ TableDesc tblDesc = Utilities.defaultTd;
+ PartitionDesc partDesc = new PartitionDesc(tblDesc, null);
+ LinkedHashMap<String, PartitionDesc> pt = new LinkedHashMap<String, PartitionDesc>();
pt.put("/tmp/testfolder", partDesc);
LinkedHashMap<String, Operator<? extends Serializable>> ao = new LinkedHashMap<String, Operator<? extends Serializable>>();
ao.put("a", op);
- mapredWork mrwork = new mapredWork();
+ MapredWork mrwork = new MapredWork();
mrwork.setPathToAliases(pa);
mrwork.setPathToPartitionInfo(pt);
mrwork.setAliasToWork(ao);
@@ -83,7 +83,7 @@
JobConf job = new JobConf(TestPlan.class);
job.set("fs.default.name", "file:///");
Utilities.setMapRedWork(job, mrwork);
- mapredWork mrwork2 = Utilities.getMapRedWork(job);
+ MapredWork mrwork2 = Utilities.getMapRedWork(job);
Utilities.clearMapRedWork(job);
// over here we should have some checks of the deserialized object against
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (revision 901960)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (working copy)
@@ -29,15 +29,15 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.collectDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.filterDesc;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
-import org.apache.hadoop.hive.ql.plan.partitionDesc;
-import org.apache.hadoop.hive.ql.plan.scriptDesc;
-import org.apache.hadoop.hive.ql.plan.selectDesc;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.plan.CollectDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.ScriptDesc;
+import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -87,21 +87,21 @@
public void testBaseFilterOperator() throws Throwable {
try {
System.out.println("Testing Filter Operator");
- exprNodeDesc col0 = TestExecDriver.getStringColumn("col0");
- exprNodeDesc col1 = TestExecDriver.getStringColumn("col1");
- exprNodeDesc col2 = TestExecDriver.getStringColumn("col2");
- exprNodeDesc zero = new exprNodeConstantDesc("0");
- exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc col0 = TestExecDriver.getStringColumn("col0");
+ ExprNodeDesc col1 = TestExecDriver.getStringColumn("col1");
+ ExprNodeDesc col2 = TestExecDriver.getStringColumn("col2");
+ ExprNodeDesc zero = new ExprNodeConstantDesc("0");
+ ExprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc(">", col2, col1);
- exprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("==", col0, zero);
- exprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("and", func1, func2);
assert (func3 != null);
- filterDesc filterCtx = new filterDesc(func3, false);
+ FilterDesc filterCtx = new FilterDesc(func3, false);
// Configuration
- Operator<filterDesc> op = OperatorFactory.get(filterDesc.class);
+ Operator<FilterDesc> op = OperatorFactory.get(FilterDesc.class);
op.setConf(filterCtx);
// runtime initialization
@@ -137,32 +137,32 @@
try {
System.out.println("Testing FileSink Operator");
// col1
- exprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
+ ExprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
// col2
- exprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
- exprNodeDesc expr2 = new exprNodeConstantDesc("1");
- exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
+ ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
+ ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat", expr1, expr2);
// select operator to project these two columns
- ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
earr.add(exprDesc1);
earr.add(exprDesc2);
ArrayList outputCols = new ArrayList();
for (int i = 0; i < earr.size(); i++) {
outputCols.add("_col" + i);
}
- selectDesc selectCtx = new selectDesc(earr, outputCols);
- Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
+ SelectDesc selectCtx = new SelectDesc(earr, outputCols);
+ Operator<SelectDesc> op = OperatorFactory.get(SelectDesc.class);
op.setConf(selectCtx);
// fileSinkOperator to dump the output of the select
- // fileSinkDesc fsd = new fileSinkDesc ("file:///tmp" + File.separator +
+ // FileSinkDesc fsd = new FileSinkDesc ("file:///tmp" + File.separator +
// System.getProperty("user.name") + File.separator +
// "TestFileSinkOperator",
// Utilities.defaultTd, false);
- // Operator<fileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
+ // Operator<FileSinkDesc> flop = OperatorFactory.getAndMakeChild(fsd, op);
op.initialize(new JobConf(TestOperators.class),
new ObjectInspector[] { r[0].oi });
@@ -185,37 +185,37 @@
try {
System.out.println("Testing Script Operator");
// col1
- exprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
+ ExprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");
// col2
- exprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
- exprNodeDesc expr2 = new exprNodeConstantDesc("1");
- exprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
+ ExprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
+ ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
+ ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("concat", expr1, expr2);
// select operator to project these two columns
- ArrayList<exprNodeDesc> earr = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
earr.add(exprDesc1);
earr.add(exprDesc2);
ArrayList outputCols = new ArrayList();
for (int i = 0; i < earr.size(); i++) {
outputCols.add("_col" + i);
}
- selectDesc selectCtx = new selectDesc(earr, outputCols);
- Operator<selectDesc> op = OperatorFactory.get(selectDesc.class);
+ SelectDesc selectCtx = new SelectDesc(earr, outputCols);
+ Operator<SelectDesc> op = OperatorFactory.get(SelectDesc.class);
op.setConf(selectCtx);
// scriptOperator to echo the output of the select
- tableDesc scriptOutput = PlanUtils.getDefaultTableDesc(""
+ TableDesc scriptOutput = PlanUtils.getDefaultTableDesc(""
+ Utilities.tabCode, "a,b");
- tableDesc scriptInput = PlanUtils.getDefaultTableDesc(""
+ TableDesc scriptInput = PlanUtils.getDefaultTableDesc(""
+ Utilities.tabCode, "a,b");
- scriptDesc sd = new scriptDesc("cat", scriptOutput,
+ ScriptDesc sd = new ScriptDesc("cat", scriptOutput,
TextRecordWriter.class, scriptInput, TextRecordReader.class);
- Operator<scriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
+ Operator<ScriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
// Collect operator to observe the output of the script
- collectDesc cd = new collectDesc(Integer.valueOf(10));
+ CollectDesc cd = new CollectDesc(Integer.valueOf(10));
CollectOperator cdop = (CollectOperator) OperatorFactory.getAndMakeChild(
cd, sop);
@@ -270,25 +270,25 @@
// initialize pathToTableInfo
// Default: treat the table as a single column "col"
- tableDesc td = Utilities.defaultTd;
- partitionDesc pd = new partitionDesc(td, null);
- LinkedHashMap<String, partitionDesc> pathToPartitionInfo = new LinkedHashMap<String, partitionDesc>();
+ TableDesc td = Utilities.defaultTd;
+ PartitionDesc pd = new PartitionDesc(td, null);
+ LinkedHashMap<String, PartitionDesc> pathToPartitionInfo = new LinkedHashMap<String, PartitionDesc>();
pathToPartitionInfo.put("/testDir", pd);
// initialize aliasToWork
- collectDesc cd = new collectDesc(Integer.valueOf(1));
+ CollectDesc cd = new CollectDesc(Integer.valueOf(1));
CollectOperator cdop1 = (CollectOperator) OperatorFactory
- .get(collectDesc.class);
+ .get(CollectDesc.class);
cdop1.setConf(cd);
CollectOperator cdop2 = (CollectOperator) OperatorFactory
- .get(collectDesc.class);
+ .get(CollectDesc.class);
cdop2.setConf(cd);
LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork = new LinkedHashMap<String, Operator<? extends Serializable>>();
aliasToWork.put("a", cdop1);
aliasToWork.put("b", cdop2);
- // initialize mapredWork
- mapredWork mrwork = new mapredWork();
+ // initialize MapredWork
+ MapredWork mrwork = new MapredWork();
mrwork.setPathToAliases(pathToAliases);
mrwork.setPathToPartitionInfo(pathToPartitionInfo);
mrwork.setAliasToWork(aliasToWork);
Index: ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (revision 901960)
+++ ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (working copy)
@@ -36,4 +36,4 @@
result.set(s.toString().length());
return result;
}
-}
\ No newline at end of file
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (working copy)
@@ -35,7 +35,7 @@
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
/**
* Processor Context for creating map reduce task. Walk the tree in a DFS manner
@@ -95,12 +95,12 @@
public static class GenMRUnionCtx {
Task<? extends Serializable> uTask;
List taskTmpDir;
- List<tableDesc> tt_desc;
+ List<TableDesc> tt_desc;
public GenMRUnionCtx() {
uTask = null;
taskTmpDir = new ArrayList();
- tt_desc = new ArrayList<tableDesc>();
+ tt_desc = new ArrayList<TableDesc>();
}
public Task<? extends Serializable> getUTask() {
@@ -119,18 +119,18 @@
return taskTmpDir;
}
- public void addTTDesc(tableDesc tt_desc) {
+ public void addTTDesc(TableDesc tt_desc) {
this.tt_desc.add(tt_desc);
}
- public List<tableDesc> getTTDesc() {
+ public List<TableDesc> getTTDesc() {
return tt_desc;
}
}
public static class GenMRMapJoinCtx {
String taskTmpDir;
- tableDesc tt_desc;
+ TableDesc tt_desc;
Operator<? extends Serializable> rootMapJoinOp;
MapJoinOperator oldMapJoin;
@@ -147,7 +147,7 @@
* @param rootMapJoinOp
* @param oldMapJoin
*/
- public GenMRMapJoinCtx(String taskTmpDir, tableDesc tt_desc,
+ public GenMRMapJoinCtx(String taskTmpDir, TableDesc tt_desc,
Operator<? extends Serializable> rootMapJoinOp,
MapJoinOperator oldMapJoin) {
this.taskTmpDir = taskTmpDir;
@@ -164,11 +164,11 @@
return taskTmpDir;
}
- public void setTTDesc(tableDesc tt_desc) {
+ public void setTTDesc(TableDesc tt_desc) {
this.tt_desc = tt_desc;
}
- public tableDesc getTTDesc() {
+ public TableDesc getTTDesc() {
return tt_desc;
}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRUnion1.java (working copy)
@@ -45,10 +45,10 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
-import org.apache.hadoop.hive.ql.plan.partitionDesc;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
/**
* Processor for the rule - TableScan followed by Union
@@ -125,7 +125,7 @@
Operator<? extends Serializable> parent = union.getParentOperators().get(
pos);
- mapredWork uPlan = null;
+ MapredWork uPlan = null;
// union is encountered for the first time
if (uCtxTask == null) {
@@ -136,7 +136,7 @@
ctx.setUnionTask(union, uCtxTask);
} else {
uTask = uCtxTask.getUTask();
- uPlan = (mapredWork) uTask.getWork();
+ uPlan = (MapredWork) uTask.getWork();
}
// If there is a mapjoin at position 'pos'
@@ -145,19 +145,19 @@
assert mjOp != null;
GenMRMapJoinCtx mjCtx = ctx.getMapJoinCtx(mjOp);
assert mjCtx != null;
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
String taskTmpDir = mjCtx.getTaskTmpDir();
- tableDesc tt_desc = mjCtx.getTTDesc();
+ TableDesc tt_desc = mjCtx.getTTDesc();
assert plan.getPathToAliases().get(taskTmpDir) == null;
plan.getPathToAliases().put(taskTmpDir, new ArrayList());
plan.getPathToAliases().get(taskTmpDir).add(taskTmpDir);
plan.getPathToPartitionInfo().put(taskTmpDir,
- new partitionDesc(tt_desc, null));
+ new PartitionDesc(tt_desc, null));
plan.getAliasToWork().put(taskTmpDir, mjCtx.getRootMapJoinOp());
}
- tableDesc tt_desc = PlanUtils.getIntermediateFileTableDesc(PlanUtils
+ TableDesc tt_desc = PlanUtils.getIntermediateFileTableDesc(PlanUtils
.getFieldSchemasFromRowSchema(parent.getSchema(), "temporarycol"));
// generate the temporary file
@@ -174,7 +174,7 @@
// Create a file sink operator for this file name
Operator<? extends Serializable> fs_op = OperatorFactory.get(
- new fileSinkDesc(taskTmpDir, tt_desc, parseCtx.getConf().getBoolVar(
+ new FileSinkDesc(taskTmpDir, tt_desc, parseCtx.getConf().getBoolVar(
HiveConf.ConfVars.COMPRESSINTERMEDIATE)), parent.getSchema());
assert parent.getChildOperators().size() == 1;
@@ -189,7 +189,7 @@
// If it is map-only task, add the files to be processed
if (uPrsCtx.getMapOnlySubq(pos) && uPrsCtx.getRootTask(pos)) {
GenMapRedUtils.setTaskPlan(ctx.getCurrAliasId(), ctx.getCurrTopOp(),
- (mapredWork) currTask.getWork(), false, ctx);
+ (MapredWork) currTask.getWork(), false, ctx);
}
ctx.setCurrTask(uTask);
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java (working copy)
@@ -52,14 +52,14 @@
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.aggregationDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.groupByDesc;
-import org.apache.hadoop.hive.ql.plan.joinDesc;
-import org.apache.hadoop.hive.ql.plan.mapJoinDesc;
-import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;
-import org.apache.hadoop.hive.ql.plan.selectDesc;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.JoinDesc;
+import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.SelectDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
/**
* Factory for generating the different node processors used by ColumnPruner.
@@ -74,7 +74,7 @@
Object... nodeOutputs) throws SemanticException {
FilterOperator op = (FilterOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
- exprNodeDesc condn = op.getConf().getPredicate();
+ ExprNodeDesc condn = op.getConf().getPredicate();
// get list of columns used in the filter
List cl = condn.getCols();
// merge it with the downstream col list
@@ -102,16 +102,16 @@
GroupByOperator op = (GroupByOperator) nd;
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
List colLists = new ArrayList();
- groupByDesc conf = op.getConf();
- ArrayList<exprNodeDesc> keys = conf.getKeys();
- for (exprNodeDesc key : keys) {
+ GroupByDesc conf = op.getConf();
+ ArrayList<ExprNodeDesc> keys = conf.getKeys();
+ for (ExprNodeDesc key : keys) {
colLists = Utilities.mergeUniqElems(colLists, key.getCols());
}
- ArrayList<aggregationDesc> aggrs = conf.getAggregators();
- for (aggregationDesc aggr : aggrs) {
- ArrayList<exprNodeDesc> params = aggr.getParameters();
- for (exprNodeDesc param : params) {
+ ArrayList<AggregationDesc> aggrs = conf.getAggregators();
+ for (AggregationDesc aggr : aggrs) {
+ ArrayList<ExprNodeDesc> params = aggr.getParameters();
+ for (ExprNodeDesc param : params) {
colLists = Utilities.mergeUniqElems(colLists, param.getCols());
}
}
@@ -155,7 +155,7 @@
/**
* The Node Processor for Column Pruning on Table Scan Operators. It will
- * store needed columns in tableScanDesc.
+ * store needed columns in TableScanDesc.
*/
public static class ColumnPrunerTableScanProc implements NodeProcessor {
public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
@@ -197,15 +197,15 @@
HashMap<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap = cppCtx
.getOpToParseCtxMap();
RowResolver redSinkRR = opToParseCtxMap.get(op).getRR();
- reduceSinkDesc conf = op.getConf();
+ ReduceSinkDesc conf = op.getConf();
List<Operator<? extends Serializable>> childOperators = op
.getChildOperators();
List<Operator<? extends Serializable>> parentOperators = op
.getParentOperators();
List colLists = new ArrayList();
- ArrayList<exprNodeDesc> keys = conf.getKeyCols();
- for (exprNodeDesc key : keys) {
+ ArrayList<ExprNodeDesc> keys = conf.getKeyCols();
+ for (ExprNodeDesc key : keys) {
colLists = Utilities.mergeUniqElems(colLists, key.getCols());
}
@@ -222,9 +222,9 @@
flags[i] = false;
}
if (childJoinCols != null && childJoinCols.size() > 0) {
- Map<String, exprNodeDesc> exprMap = op.getColumnExprMap();
+ Map<String, ExprNodeDesc> exprMap = op.getColumnExprMap();
for (String childCol : childJoinCols) {
- exprNodeDesc desc = exprMap.get(childCol);
+ ExprNodeDesc desc = exprMap.get(childCol);
int index = conf.getValueCols().indexOf(desc);
flags[index] = true;
String[] nm = redSinkRR.reverseLookup(childCol);
@@ -241,8 +241,8 @@
} else {
// Reduce Sink contains the columns needed - no need to aggregate from
// children
- ArrayList<exprNodeDesc> vals = conf.getValueCols();
- for (exprNodeDesc val : vals) {
+ ArrayList<ExprNodeDesc> vals = conf.getValueCols();
+ for (ExprNodeDesc val : vals) {
colLists = Utilities.mergeUniqElems(colLists, val.getCols());
}
}
@@ -290,7 +290,7 @@
}
cols = cppCtx.genColLists(op);
- selectDesc conf = op.getConf();
+ SelectDesc conf = op.getConf();
// The input to the select does not matter. Go over the expressions
// and return the ones which have a marked column
cppCtx.getPrunedColLists().put(op,
@@ -301,16 +301,16 @@
}
// do we need to prune the select operator?
- List<exprNodeDesc> originalColList = op.getConf().getColList();
+ List<ExprNodeDesc> originalColList = op.getConf().getColList();
List columns = new ArrayList();
- for (exprNodeDesc expr : originalColList) {
+ for (ExprNodeDesc expr : originalColList) {
Utilities.mergeUniqElems(columns, expr.getCols());
}
// by now, 'prunedCols' are columns used by child operators, and 'columns'
// are columns used by this select operator.
ArrayList originalOutputColumnNames = conf.getOutputColumnNames();
if (cols.size() < originalOutputColumnNames.size()) {
- ArrayList<exprNodeDesc> newColList = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> newColList = new ArrayList<ExprNodeDesc>();
ArrayList newOutputColumnNames = new ArrayList();
Vector rs_oldsignature = op.getSchema().getSignature();
Vector rs_newsignature = new Vector();
@@ -370,8 +370,8 @@
private static boolean[] getPruneReduceSinkOpRetainFlags(
List retainedParentOpOutputCols, ReduceSinkOperator reduce) {
- reduceSinkDesc reduceConf = reduce.getConf();
- java.util.ArrayList<exprNodeDesc> originalValueEval = reduceConf
+ ReduceSinkDesc reduceConf = reduce.getConf();
+ java.util.ArrayList<ExprNodeDesc> originalValueEval = reduceConf
.getValueCols();
boolean[] flags = new boolean[originalValueEval.size()];
for (int i = 0; i < originalValueEval.size(); i++) {
@@ -394,18 +394,18 @@
private static void pruneReduceSinkOperator(boolean[] retainFlags,
ReduceSinkOperator reduce, ColumnPrunerProcCtx cppCtx)
throws SemanticException {
- reduceSinkDesc reduceConf = reduce.getConf();
- Map<String, exprNodeDesc> oldMap = reduce.getColumnExprMap();
- Map<String, exprNodeDesc> newMap = new HashMap<String, exprNodeDesc>();
+ ReduceSinkDesc reduceConf = reduce.getConf();
+ Map<String, ExprNodeDesc> oldMap = reduce.getColumnExprMap();
+ Map<String, ExprNodeDesc> newMap = new HashMap<String, ExprNodeDesc>();
Vector sig = new Vector();
RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(reduce).getRR();
RowResolver newRR = new RowResolver();
ArrayList originalValueOutputColNames = reduceConf
.getOutputValueColumnNames();
- java.util.ArrayList<exprNodeDesc> originalValueEval = reduceConf
+ java.util.ArrayList<ExprNodeDesc> originalValueEval = reduceConf
.getValueCols();
ArrayList newOutputColNames = new ArrayList();
- java.util.ArrayList<exprNodeDesc> newValueEval = new ArrayList<exprNodeDesc>();
+ java.util.ArrayList<ExprNodeDesc> newValueEval = new ArrayList<ExprNodeDesc>();
for (int i = 0; i < retainFlags.length; i++) {
if (retainFlags[i]) {
newValueEval.add(originalValueEval.get(i));
@@ -423,7 +423,7 @@
}
}
- ArrayList<exprNodeDesc> keyCols = reduceConf.getKeyCols();
+ ArrayList<ExprNodeDesc> keyCols = reduceConf.getKeyCols();
List keys = new ArrayList();
RowResolver parResover = cppCtx.getOpToParseCtxMap().get(
reduce.getParentOperators().get(0)).getRR();
@@ -444,7 +444,7 @@
reduce.getSchema().setSignature(sig);
reduceConf.setOutputValueColumnNames(newOutputColNames);
reduceConf.setValueCols(newValueEval);
- tableDesc newValueTable = PlanUtils.getReduceValueTableDesc(PlanUtils
+ TableDesc newValueTable = PlanUtils.getReduceValueTableDesc(PlanUtils
.getFieldSchemasFromColumnList(reduceConf.getValueCols(),
newOutputColNames, 0, ""));
reduceConf.setValueSerializeInfo(newValueTable);
@@ -495,8 +495,8 @@
}
private static void pruneJoinOperator(NodeProcessorCtx ctx,
- CommonJoinOperator op, joinDesc conf,
- Map<String, exprNodeDesc> columnExprMap,
+ CommonJoinOperator op, JoinDesc conf,
+ Map<String, ExprNodeDesc> columnExprMap,
Map<Byte, List<Integer>> retainMap, boolean mapJoin)
throws SemanticException {
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -516,11 +516,11 @@
RowResolver newJoinRR = new RowResolver();
ArrayList outputCols = new ArrayList();
Vector rs = new Vector();
- Map<String, exprNodeDesc> newColExprMap = new HashMap<String, exprNodeDesc>();
+ Map<String, ExprNodeDesc> newColExprMap = new HashMap<String, ExprNodeDesc>();
for (int i = 0; i < conf.getOutputColumnNames().size(); i++) {
String internalName = conf.getOutputColumnNames().get(i);
- exprNodeDesc desc = columnExprMap.get(internalName);
+ ExprNodeDesc desc = columnExprMap.get(internalName);
Byte tag = conf.getReversedExprs().get(internalName);
if (!childColLists.contains(internalName)) {
int index = conf.getExprs().get(tag).indexOf(desc);
@@ -545,30 +545,30 @@
if (mapJoin) {
// regenerate the valueTableDesc
- List<tableDesc> valueTableDescs = new ArrayList<tableDesc>();
+ List<TableDesc> valueTableDescs = new ArrayList<TableDesc>();
for (int pos = 0; pos < op.getParentOperators().size(); pos++) {
- List<exprNodeDesc> valueCols = conf.getExprs()
+ List<ExprNodeDesc> valueCols = conf.getExprs()
.get(new Byte((byte) pos));
StringBuilder keyOrder = new StringBuilder();
for (int i = 0; i < valueCols.size(); i++) {
keyOrder.append("+");
}
- tableDesc valueTableDesc = PlanUtils.getMapJoinValueTableDesc(PlanUtils
+ TableDesc valueTableDesc = PlanUtils.getMapJoinValueTableDesc(PlanUtils
.getFieldSchemasFromColumnList(valueCols, "mapjoinvalue"));
valueTableDescs.add(valueTableDesc);
}
- ((mapJoinDesc) conf).setValueTblDescs(valueTableDescs);
+ ((MapJoinDesc) conf).setValueTblDescs(valueTableDescs);
- Set<Map.Entry<Byte, List<exprNodeDesc>>> exprs = ((mapJoinDesc) conf)
+ Set<Map.Entry<Byte, List<ExprNodeDesc>>> exprs = ((MapJoinDesc) conf)
.getKeys().entrySet();
- Iterator<Map.Entry<Byte, List<exprNodeDesc>>> iters = exprs.iterator();
+ Iterator<Map.Entry<Byte, List<ExprNodeDesc>>> iters = exprs.iterator();
while (iters.hasNext()) {
- Map.Entry<Byte, List<exprNodeDesc>> entry = iters.next();
- List<exprNodeDesc> lists = entry.getValue();
+ Map.Entry<Byte, List<ExprNodeDesc>> entry = iters.next();
+ List<ExprNodeDesc> lists = entry.getValue();
for (int j = 0; j < lists.size(); j++) {
- exprNodeDesc desc = lists.get(j);
+ ExprNodeDesc desc = lists.get(j);
Byte tag = entry.getKey();
List cols = prunedColLists.get(tag);
cols = Utilities.mergeUniqElems(cols, desc.getCols());
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink1.java (working copy)
@@ -31,7 +31,7 @@
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
/**
* Processor for the rule - table scan followed by reduce sink
@@ -58,7 +58,7 @@
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(stack.get(stack.size() - 2));
Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
- mapredWork currPlan = (mapredWork) currTask.getWork();
+ MapredWork currPlan = (MapredWork) currTask.getWork();
Operator<? extends Serializable> currTopOp = mapredCtx.getCurrTopOp();
String currAliasId = mapredCtx.getCurrAliasId();
Operator<? extends Serializable> reducer = op.getChildOperators().get(0);
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink3.java (working copy)
@@ -33,7 +33,7 @@
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
/**
* Processor for the rule - union followed by reduce sink
@@ -71,7 +71,7 @@
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
.getOpTaskMap();
Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRRedSink4.java (working copy)
@@ -31,7 +31,7 @@
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
/**
* Processor for the rule - map join followed by reduce sink
@@ -63,7 +63,7 @@
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = ctx
.getOpTaskMap();
Task<? extends Serializable> opMapTask = opTaskMap.get(reducer);
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/SamplePruner.java (working copy)
@@ -41,8 +41,8 @@
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.filterDesc;
-import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
/**
* The transformation step that does sample pruning.
@@ -116,7 +116,7 @@
public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx,
Object... nodeOutputs) throws SemanticException {
FilterOperator filOp = (FilterOperator) nd;
- filterDesc filOpDesc = filOp.getConf();
+ FilterDesc filOpDesc = filOp.getConf();
sampleDesc sampleDescr = filOpDesc.getSampleDescr();
if ((sampleDescr == null) || !sampleDescr.getInputPruning()) {
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy)
@@ -50,17 +50,17 @@
import org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles;
import org.apache.hadoop.hive.ql.plan.ConditionalWork;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.extractDesc;
-import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
-import org.apache.hadoop.hive.ql.plan.loadFileDesc;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
-import org.apache.hadoop.hive.ql.plan.moveWork;
-import org.apache.hadoop.hive.ql.plan.partitionDesc;
-import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
-import org.apache.hadoop.hive.ql.plan.tableScanDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExtractDesc;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.MoveWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.ConditionalResolverMergeFiles.ConditionalResolverMergeFilesCtx;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -105,10 +105,10 @@
// merge for a map-only job
// or for a map-reduce job
if ((parseCtx.getConf().getBoolVar(
- HiveConf.ConfVars.HIVEMERGEMAPFILES) && (((mapredWork) currTask
+ HiveConf.ConfVars.HIVEMERGEMAPFILES) && (((MapredWork) currTask
.getWork()).getReducer() == null))
|| (parseCtx.getConf().getBoolVar(
- HiveConf.ConfVars.HIVEMERGEMAPREDFILES) && (((mapredWork) currTask
+ HiveConf.ConfVars.HIVEMERGEMAPREDFILES) && (((MapredWork) currTask
.getWork()).getReducer() != null))) {
chDir = true;
}
@@ -132,35 +132,35 @@
RowSchema fsRS = fsOp.getSchema();
// create a reduce Sink operator - key is the first column
- ArrayList<exprNodeDesc> keyCols = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> keyCols = new ArrayList<ExprNodeDesc>();
keyCols.add(TypeCheckProcFactory.DefaultExprProcessor
.getFuncExprNodeDesc("rand"));
- ArrayList<exprNodeDesc> valueCols = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
for (ColumnInfo ci : fsRS.getSignature()) {
- valueCols.add(new exprNodeColumnDesc(ci.getType(), ci.getInternalName(),
+ valueCols.add(new ExprNodeColumnDesc(ci.getType(), ci.getInternalName(),
ci.getTabAlias(), ci.getIsPartitionCol()));
}
// create a dummy tableScan operator
Operator<? extends Serializable> ts_op = OperatorFactory.get(
- tableScanDesc.class, fsRS);
+ TableScanDesc.class, fsRS);
ArrayList<String> outputColumns = new ArrayList<String>();
for (int i = 0; i < valueCols.size(); i++) {
outputColumns.add(SemanticAnalyzer.getColumnInternalName(i));
}
- reduceSinkDesc rsDesc = PlanUtils.getReduceSinkDesc(
- new ArrayList<exprNodeDesc>(), valueCols, outputColumns, false, -1, -1,
+ ReduceSinkDesc rsDesc = PlanUtils.getReduceSinkDesc(
+ new ArrayList<ExprNodeDesc>(), valueCols, outputColumns, false, -1, -1,
-1);
OperatorFactory.getAndMakeChild(rsDesc, fsRS, ts_op);
- mapredWork cplan = GenMapRedUtils.getMapRedWork();
+ MapredWork cplan = GenMapRedUtils.getMapRedWork();
ParseContext parseCtx = ctx.getParseCtx();
Task<? extends Serializable> mergeTask = TaskFactory.get(cplan, parseCtx
.getConf());
- fileSinkDesc fsConf = fsOp.getConf();
+ FileSinkDesc fsConf = fsOp.getConf();
// Add the extract operator to get the value fields
RowResolver out_rwsch = new RowResolver();
@@ -174,19 +174,19 @@
pos = Integer.valueOf(pos.intValue() + 1);
}
- Operator extract = OperatorFactory.getAndMakeChild(new extractDesc(
- new exprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
+ Operator extract = OperatorFactory.getAndMakeChild(new ExtractDesc(
+ new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(
out_rwsch.getColumnInfos()));
- tableDesc ts = (tableDesc) fsConf.getTableInfo().clone();
+ TableDesc ts = (TableDesc) fsConf.getTableInfo().clone();
fsConf
.getTableInfo()
.getProperties()
.remove(
org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
FileSinkOperator newOutput = (FileSinkOperator) OperatorFactory
- .getAndMakeChild(new fileSinkDesc(finalName, ts, parseCtx.getConf()
+ .getAndMakeChild(new FileSinkDesc(finalName, ts, parseCtx.getConf()
.getBoolVar(HiveConf.ConfVars.COMPRESSRESULT)), fsRS, extract);
cplan.setReducer(extract);
@@ -195,10 +195,10 @@
cplan.getPathToAliases().put(fsConf.getDirName(), aliases);
cplan.getAliasToWork().put(fsConf.getDirName(), ts_op);
cplan.getPathToPartitionInfo().put(fsConf.getDirName(),
- new partitionDesc(fsConf.getTableInfo(), null));
+ new PartitionDesc(fsConf.getTableInfo(), null));
cplan.setNumReduceTasks(-1);
- moveWork dummyMv = new moveWork(null, null, null, new loadFileDesc(fsOp
+ MoveWork dummyMv = new MoveWork(null, null, null, new LoadFileDesc(fsOp
.getConf().getDirName(), finalName, true, null, null), false);
Task<? extends Serializable> dummyMergeTask = TaskFactory.get(dummyMv, ctx
.getConf());
@@ -234,7 +234,7 @@
List<Task<? extends Serializable>> mvTasks, FileSinkOperator fsOp) {
// find the move task
for (Task<? extends Serializable> mvTsk : mvTasks) {
- moveWork mvWork = (moveWork) mvTsk.getWork();
+ MoveWork mvWork = (MoveWork) mvTsk.getWork();
String srcDir = null;
if (mvWork.getLoadFileWork() != null) {
srcDir = mvWork.getLoadFileWork().getSourceDir();
@@ -315,14 +315,14 @@
assert (!seenOps.contains(currTopOp));
seenOps.add(currTopOp);
GenMapRedUtils.setTaskPlan(currAliasId, currTopOp,
- (mapredWork) currTask.getWork(), false, ctx);
+ (MapredWork) currTask.getWork(), false, ctx);
opTaskMap.put(null, currTask);
rootTasks.add(currTask);
} else {
if (!seenOps.contains(currTopOp)) {
seenOps.add(currTopOp);
GenMapRedUtils.setTaskPlan(currAliasId, currTopOp,
- (mapredWork) mapTask.getWork(), false, ctx);
+ (MapredWork) mapTask.getWork(), false, ctx);
}
// mapTask and currTask should be merged by a join/union operator
// (e.g., GenMRUnion1) which has multiple topOps.
@@ -347,15 +347,15 @@
if (currMapJoinOp != null) {
opTaskMap.put(null, currTask);
GenMRMapJoinCtx mjCtx = ctx.getMapJoinCtx(currMapJoinOp);
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
String taskTmpDir = mjCtx.getTaskTmpDir();
- tableDesc tt_desc = mjCtx.getTTDesc();
+ TableDesc tt_desc = mjCtx.getTTDesc();
assert plan.getPathToAliases().get(taskTmpDir) == null;
plan.getPathToAliases().put(taskTmpDir, new ArrayList<String>());
plan.getPathToAliases().get(taskTmpDir).add(taskTmpDir);
plan.getPathToPartitionInfo().put(taskTmpDir,
- new partitionDesc(tt_desc, null));
+ new PartitionDesc(tt_desc, null));
plan.getAliasToWork().put(taskTmpDir, mjCtx.getRootMapJoinOp());
return dest;
}
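
The first GenMRFileSink1 hunk gates the whole merge-job construction on two configuration flags and on whether the current plan has a reducer. A hedged restatement of just that boolean decision, with the HiveConf flags reduced to plain booleans (names below are illustrative, not the HiveConf API):

public class MergeDecision {
  // hive.merge.mapfiles / hive.merge.mapredfiles reduced to plain booleans.
  static boolean shouldMerge(boolean mergeMapFiles, boolean mergeMapRedFiles,
      boolean hasReducer) {
    // map-only job: merge when the map-file flag is set and there is no reducer;
    // map-reduce job: merge when the map-red-file flag is set and a reducer exists.
    return (mergeMapFiles && !hasReducer) || (mergeMapRedFiles && hasReducer);
  }

  public static void main(String[] args) {
    System.out.println(shouldMerge(true, false, false));  // true  (map-only merge)
    System.out.println(shouldMerge(false, true, true));   // true  (map-reduce merge)
    System.out.println(shouldMerge(true, false, true));   // false (flag does not apply)
  }
}
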
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpWalkerCtx.java (working copy)
@@ -22,7 +22,7 @@
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
/**
* Context class for operator tree walker for partition pruner.
@@ -35,17 +35,17 @@
* Map from tablescan operator to partition pruning predicate that is
* initialized from the ParseContext
*/
- private final HashMap<TableScanOperator, exprNodeDesc> opToPartPruner;
+ private final HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
/**
* Constructor
*/
- public OpWalkerCtx(HashMap<TableScanOperator, exprNodeDesc> opToPartPruner) {
+ public OpWalkerCtx(HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner) {
this.opToPartPruner = opToPartPruner;
hasNonPartCols = false;
}
- public HashMap<TableScanOperator, exprNodeDesc> getOpToPartPruner() {
+ public HashMap<TableScanOperator, ExprNodeDesc> getOpToPartPruner() {
return opToPartPruner;
}
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (working copy)
@@ -48,9 +48,9 @@
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -109,25 +109,25 @@
* @param expr
* the pruner expression for the table
*/
- public static boolean onlyContainsPartnCols(Table tab, exprNodeDesc expr) {
+ public static boolean onlyContainsPartnCols(Table tab, ExprNodeDesc expr) {
if (!tab.isPartitioned() || (expr == null)) {
return true;
}
- if (expr instanceof exprNodeColumnDesc) {
- String colName = ((exprNodeColumnDesc) expr).getColumn();
+ if (expr instanceof ExprNodeColumnDesc) {
+ String colName = ((ExprNodeColumnDesc) expr).getColumn();
return tab.isPartitionKey(colName);
}
// It cannot contain a non-deterministic function
- if ((expr instanceof exprNodeGenericFuncDesc)
- && !FunctionRegistry.isDeterministic(((exprNodeGenericFuncDesc) expr)
+ if ((expr instanceof ExprNodeGenericFuncDesc)
+ && !FunctionRegistry.isDeterministic(((ExprNodeGenericFuncDesc) expr)
.getGenericUDF())) {
return false;
}
// All columns of the expression must be partitioned columns
- List<exprNodeDesc> children = expr.getChildren();
+ List<ExprNodeDesc> children = expr.getChildren();
if (children != null) {
for (int i = 0; i < children.size(); i++) {
if (!onlyContainsPartnCols(tab, children.get(i))) {
@@ -155,7 +155,7 @@
* pruner condition.
* @throws HiveException
*/
- public static PrunedPartitionList prune(Table tab, exprNodeDesc prunerExpr,
+ public static PrunedPartitionList prune(Table tab, ExprNodeDesc prunerExpr,
HiveConf conf, String alias,
Map<String, PrunedPartitionList> prunedPartitionsMap)
throws HiveException {
@@ -271,17 +271,17 @@
/**
* Whether the expression contains a column node or not.
*/
- public static boolean hasColumnExpr(exprNodeDesc desc) {
+ public static boolean hasColumnExpr(ExprNodeDesc desc) {
// Return false for null
if (desc == null) {
return false;
}
- // Return true for exprNodeColumnDesc
- if (desc instanceof exprNodeColumnDesc) {
+ // Return true for ExprNodeColumnDesc
+ if (desc instanceof ExprNodeColumnDesc) {
return true;
}
// Return true in case one of the children is column expr.
- List<exprNodeDesc> children = desc.getChildren();
+ List<ExprNodeDesc> children = desc.getChildren();
if (children != null) {
for (int i = 0; i < children.size(); i++) {
if (hasColumnExpr(children.get(i))) {
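
onlyContainsPartnCols and hasColumnExpr above are both plain recursive walks over an expression tree's children. A self-contained sketch of the same shape of recursion, over an illustrative node hierarchy rather than Hive's ExprNodeDesc classes:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative expression-node hierarchy; not Hive's ExprNodeDesc classes.
abstract class ExprNode {
  List<ExprNode> getChildren() { return new ArrayList<ExprNode>(); }
}

class ColumnNode extends ExprNode {
  final String column;
  ColumnNode(String column) { this.column = column; }
}

class FuncNode extends ExprNode {
  final boolean deterministic;
  final List<ExprNode> children;
  FuncNode(boolean deterministic, ExprNode... children) {
    this.deterministic = deterministic;
    this.children = Arrays.asList(children);
  }
  @Override List<ExprNode> getChildren() { return children; }
}

public class PartitionOnlyCheck {
  // True iff every referenced column is a partition column and every function
  // along the way is deterministic -- the same recursion shape as above.
  static boolean onlyPartitionCols(ExprNode expr, Set<String> partCols) {
    if (expr == null) {
      return true;
    }
    if (expr instanceof ColumnNode) {
      return partCols.contains(((ColumnNode) expr).column);
    }
    if (expr instanceof FuncNode && !((FuncNode) expr).deterministic) {
      return false;
    }
    for (ExprNode child : expr.getChildren()) {
      if (!onlyPartitionCols(child, partCols)) {
        return false;
      }
    }
    return true;
  }

  public static void main(String[] args) {
    Set<String> partCols = new HashSet<String>(Arrays.asList("ds", "hr"));
    ExprNode pred = new FuncNode(true, new ColumnNode("ds"), new ColumnNode("hr"));
    System.out.println(onlyPartitionCols(pred, partCols));               // true
    System.out.println(onlyPartitionCols(pred, new HashSet<String>()));  // false
  }
}
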
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/ExprProcFactory.java (working copy)
@@ -36,12 +36,12 @@
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
/**
* Expression processor factory for partition pruning. Each processor tries to
@@ -60,14 +60,14 @@
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
Object... nodeOutputs) throws SemanticException {
- exprNodeDesc newcd = null;
- exprNodeColumnDesc cd = (exprNodeColumnDesc) nd;
+ ExprNodeDesc newcd = null;
+ ExprNodeColumnDesc cd = (ExprNodeColumnDesc) nd;
ExprProcCtx epc = (ExprProcCtx) procCtx;
if (cd.getTabAlias().equalsIgnoreCase(epc.getTabAlias())
&& cd.getIsParititonCol()) {
newcd = cd.clone();
} else {
- newcd = new exprNodeConstantDesc(cd.getTypeInfo(), null);
+ newcd = new ExprNodeConstantDesc(cd.getTypeInfo(), null);
epc.setHasNonPartCols(true);
}
@@ -87,8 +87,8 @@
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
Object... nodeOutputs) throws SemanticException {
- exprNodeDesc newfd = null;
- exprNodeGenericFuncDesc fd = (exprNodeGenericFuncDesc) nd;
+ ExprNodeDesc newfd = null;
+ ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd;
boolean unknown = false;
@@ -106,24 +106,24 @@
} else {
// If any child is null, set unknown to true
for (Object child : nodeOutputs) {
- exprNodeDesc child_nd = (exprNodeDesc) child;
- if (child_nd instanceof exprNodeConstantDesc
- && ((exprNodeConstantDesc) child_nd).getValue() == null) {
+ ExprNodeDesc child_nd = (ExprNodeDesc) child;
+ if (child_nd instanceof ExprNodeConstantDesc
+ && ((ExprNodeConstantDesc) child_nd).getValue() == null) {
unknown = true;
}
}
}
if (unknown) {
- newfd = new exprNodeConstantDesc(fd.getTypeInfo(), null);
+ newfd = new ExprNodeConstantDesc(fd.getTypeInfo(), null);
} else {
// Create the list of children
- ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>();
+ ArrayList<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
for (Object child : nodeOutputs) {
- children.add((exprNodeDesc) child);
+ children.add((ExprNodeDesc) child);
}
// Create a copy of the function descriptor
- newfd = new exprNodeGenericFuncDesc(fd.getTypeInfo(), fd
+ newfd = new ExprNodeGenericFuncDesc(fd.getTypeInfo(), fd
.getGenericUDF(), children);
}
@@ -138,14 +138,14 @@
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
Object... nodeOutputs) throws SemanticException {
- exprNodeFieldDesc fnd = (exprNodeFieldDesc) nd;
+ ExprNodeFieldDesc fnd = (ExprNodeFieldDesc) nd;
boolean unknown = false;
int idx = 0;
- exprNodeDesc left_nd = null;
+ ExprNodeDesc left_nd = null;
for (Object child : nodeOutputs) {
- exprNodeDesc child_nd = (exprNodeDesc) child;
- if (child_nd instanceof exprNodeConstantDesc
- && ((exprNodeConstantDesc) child_nd).getValue() == null) {
+ ExprNodeDesc child_nd = (ExprNodeDesc) child;
+ if (child_nd instanceof ExprNodeConstantDesc
+ && ((ExprNodeConstantDesc) child_nd).getValue() == null) {
unknown = true;
}
left_nd = child_nd;
@@ -153,11 +153,11 @@
assert (idx == 0);
- exprNodeDesc newnd = null;
+ ExprNodeDesc newnd = null;
if (unknown) {
- newnd = new exprNodeConstantDesc(fnd.getTypeInfo(), null);
+ newnd = new ExprNodeConstantDesc(fnd.getTypeInfo(), null);
} else {
- newnd = new exprNodeFieldDesc(fnd.getTypeInfo(), left_nd, fnd
+ newnd = new ExprNodeFieldDesc(fnd.getTypeInfo(), left_nd, fnd
.getFieldName(), fnd.getIsList());
}
return newnd;
@@ -167,17 +167,17 @@
/**
* Processor for constants and null expressions. For such expressions the
- * processor simply clones the exprNodeDesc and returns it.
+ * processor simply clones the ExprNodeDesc and returns it.
*/
public static class DefaultExprProcessor implements NodeProcessor {
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
Object... nodeOutputs) throws SemanticException {
- if (nd instanceof exprNodeConstantDesc) {
- return ((exprNodeConstantDesc) nd).clone();
- } else if (nd instanceof exprNodeNullDesc) {
- return ((exprNodeNullDesc) nd).clone();
+ if (nd instanceof ExprNodeConstantDesc) {
+ return ((ExprNodeConstantDesc) nd).clone();
+ } else if (nd instanceof ExprNodeNullDesc) {
+ return ((ExprNodeNullDesc) nd).clone();
}
assert (false);
@@ -214,7 +214,7 @@
* has a non partition column
* @throws SemanticException
*/
- public static exprNodeDesc genPruner(String tabAlias, exprNodeDesc pred,
+ public static ExprNodeDesc genPruner(String tabAlias, ExprNodeDesc pred,
boolean hasNonPartCols) throws SemanticException {
// Create the walker, the rules dispatcher and the context.
ExprProcCtx pprCtx = new ExprProcCtx(tabAlias);
@@ -224,12 +224,12 @@
// generates the plan from the operator tree
Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>();
exprRules.put(
- new RuleRegExp("R1", exprNodeColumnDesc.class.getName() + "%"),
+ new RuleRegExp("R1", ExprNodeColumnDesc.class.getName() + "%"),
getColumnProcessor());
exprRules.put(
- new RuleRegExp("R2", exprNodeFieldDesc.class.getName() + "%"),
+ new RuleRegExp("R2", ExprNodeFieldDesc.class.getName() + "%"),
getFieldProcessor());
- exprRules.put(new RuleRegExp("R5", exprNodeGenericFuncDesc.class.getName()
+ exprRules.put(new RuleRegExp("R5", ExprNodeGenericFuncDesc.class.getName()
+ "%"), getGenericFuncProcessor());
// The dispatcher fires the processor corresponding to the closest matching
@@ -245,8 +245,8 @@
egw.startWalking(startNodes, outputMap);
hasNonPartCols = pprCtx.getHasNonPartCols();
- // Get the exprNodeDesc corresponding to the first start node;
- return (exprNodeDesc) outputMap.get(pred);
+ // Get the ExprNodeDesc corresponding to the first start node;
+ return (ExprNodeDesc) outputMap.get(pred);
}
}
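
genPruner above registers processors against class-name patterns and lets a dispatcher pick the match while walking the expression tree. A simplified, self-contained sketch in that spirit (the real code uses Hive's Rule/RuleRegExp/Dispatcher framework, which also matches against the walk stack; everything below is an illustrative stand-in):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

public class MiniExprDispatcher {
  interface NodeProc {
    Object process(Object node);
  }

  private final Map<Pattern, NodeProc> rules = new LinkedHashMap<Pattern, NodeProc>();
  private final NodeProc defaultProc;

  MiniExprDispatcher(NodeProc defaultProc) { this.defaultProc = defaultProc; }

  void register(String classNameRegex, NodeProc proc) {
    rules.put(Pattern.compile(classNameRegex), proc);
  }

  // Pick the first registered rule whose pattern matches the node's class
  // name; everything else (constants, nulls) falls through to the default.
  Object dispatch(Object node) {
    String name = node.getClass().getName();
    for (Map.Entry<Pattern, NodeProc> e : rules.entrySet()) {
      if (e.getKey().matcher(name).find()) {
        return e.getValue().process(node);
      }
    }
    return defaultProc.process(node);
  }

  public static void main(String[] args) {
    MiniExprDispatcher d = new MiniExprDispatcher(n -> "default:" + n);
    d.register("java\\.lang\\.String", n -> "column-rule:" + n);
    d.register("java\\.lang\\.Integer", n -> "func-rule:" + n);
    System.out.println(d.dispatch("ds"));    // column-rule:ds
    System.out.println(d.dispatch(42));      // func-rule:42
    System.out.println(d.dispatch(3.14));    // default:3.14
  }
}
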
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (working copy)
@@ -28,7 +28,7 @@
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
-import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
/**
* Operator factory for partition pruning processing of operator graph We find
@@ -81,12 +81,12 @@
}
// Otherwise this is not a sampling predicate and we need to
- exprNodeDesc predicate = fop.getConf().getPredicate();
+ ExprNodeDesc predicate = fop.getConf().getPredicate();
String alias = top.getConf().getAlias();
// Generate the partition pruning predicate
boolean hasNonPartCols = false;
- exprNodeDesc ppr_pred = ExprProcFactory.genPruner(alias, predicate,
+ ExprNodeDesc ppr_pred = ExprProcFactory.genPruner(alias, predicate,
hasNonPartCols);
owc.addHasNonPartCols(hasNonPartCols);
@@ -96,10 +96,10 @@
return null;
}
- private void addPruningPred(Map<TableScanOperator, exprNodeDesc> opToPPR,
- TableScanOperator top, exprNodeDesc new_ppr_pred) {
- exprNodeDesc old_ppr_pred = opToPPR.get(top);
- exprNodeDesc ppr_pred = null;
+ private void addPruningPred(Map<TableScanOperator, ExprNodeDesc> opToPPR,
+ TableScanOperator top, ExprNodeDesc new_ppr_pred) {
+ ExprNodeDesc old_ppr_pred = opToPPR.get(top);
+ ExprNodeDesc ppr_pred = null;
if (old_ppr_pred != null) {
// or the old_ppr_pred and the new_ppr_pred
ppr_pred = TypeCheckProcFactory.DefaultExprProcessor
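
addPruningPred above merges the freshly generated pruning predicate into the per-table-scan map by OR-ing it with whatever predicate was recorded earlier, so filters from different query branches all survive in the pruner. A hedged sketch of that accumulation, with strings standing in for ExprNodeDesc and for the OR function call:

import java.util.HashMap;
import java.util.Map;

public class PruningPredAccumulator {
  // OR the previously recorded predicate for this scan with the new one;
  // the first predicate for a scan is stored as-is.
  static void addPruningPred(Map<String, String> opToPartPruner,
      String tableScanId, String newPred) {
    String old = opToPartPruner.get(tableScanId);
    opToPartPruner.put(tableScanId,
        old == null ? newPred : "(" + old + ") or (" + newPred + ")");
  }

  public static void main(String[] args) {
    Map<String, String> opToPartPruner = new HashMap<String, String>();
    addPruningPred(opToPartPruner, "ts_src", "ds = '2010-01-01'");
    addPruningPred(opToPartPruner, "ts_src", "ds = '2010-01-02'");
    // prints: (ds = '2010-01-01') or (ds = '2010-01-02')
    System.out.println(opToPartPruner.get("ts_src"));
  }
}
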
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (revision 901960)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (working copy)
@@ -56,16 +56,16 @@
import org.apache.hadoop.hive.ql.parse.RowResolver;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.plan.fetchWork;
-import org.apache.hadoop.hive.ql.plan.fileSinkDesc;
-import org.apache.hadoop.hive.ql.plan.mapJoinDesc;
-import org.apache.hadoop.hive.ql.plan.mapredLocalWork;
-import org.apache.hadoop.hive.ql.plan.mapredWork;
-import org.apache.hadoop.hive.ql.plan.partitionDesc;
-import org.apache.hadoop.hive.ql.plan.reduceSinkDesc;
-import org.apache.hadoop.hive.ql.plan.tableDesc;
-import org.apache.hadoop.hive.ql.plan.tableScanDesc;
-import org.apache.hadoop.hive.ql.plan.filterDesc.sampleDesc;
+import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
+import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
+import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
/**
* General utility common functions for the Processor to convert operator into
@@ -94,14 +94,14 @@
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = opProcCtx
.getOpTaskMap();
Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
opTaskMap.put(reducer, currTask);
plan.setReducer(reducer);
- reduceSinkDesc desc = op.getConf();
+ ReduceSinkDesc desc = op.getConf();
plan.setNumReduceTasks(desc.getNumReducers());
@@ -148,7 +148,7 @@
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(
parentPos));
Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
HashMap<Operator<? extends Serializable>, Task<? extends Serializable>> opTaskMap = opProcCtx
.getOpTaskMap();
Operator<? extends Serializable> currTopOp = opProcCtx.getCurrTopOp();
@@ -169,7 +169,7 @@
if (reducer.getClass() == JoinOperator.class) {
plan.setNeedsTagging(true);
}
- reduceSinkDesc desc = (reduceSinkDesc) op.getConf();
+ ReduceSinkDesc desc = (ReduceSinkDesc) op.getConf();
plan.setNumReduceTasks(desc.getNumReducers());
} else {
opTaskMap.put(op, currTask);
@@ -178,7 +178,7 @@
if (!readInputUnion) {
GenMRMapJoinCtx mjCtx = opProcCtx.getMapJoinCtx(currMapJoinOp);
String taskTmpDir;
- tableDesc tt_desc;
+ TableDesc tt_desc;
Operator<? extends Serializable> rootOp;
if (mjCtx.getOldMapJoin() == null) {
@@ -200,7 +200,7 @@
opProcCtx.setCurrMapJoinOp(null);
} else {
- mapJoinDesc desc = (mapJoinDesc) op.getConf();
+ MapJoinDesc desc = (MapJoinDesc) op.getConf();
// The map is overloaded to keep track of mapjoins also
opTaskMap.put(op, currTask);
@@ -237,13 +237,13 @@
.getMapCurrCtx();
GenMapRedCtx mapredCtx = mapCurrCtx.get(op.getParentOperators().get(0));
Task<? extends Serializable> currTask = mapredCtx.getCurrTask();
- mapredWork plan = (mapredWork) currTask.getWork();
+ MapredWork plan = (MapredWork) currTask.getWork();
HashMap