diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java index 154a78b..dfad6c1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java @@ -281,4 +281,13 @@ private Path backupOutputPath(FileSystem fs, Path outpath) return null; } } + + @Override + public String getName() { + return AbstractFileMergeOperator.getOperatorName(); + } + + public static String getOperatorName() { + return "MERGE"; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/AppMasterEventOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/AppMasterEventOperator.java index 743098b..bf30ef1 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/AppMasterEventOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/AppMasterEventOperator.java @@ -20,9 +20,7 @@ import java.io.IOException; import java.nio.ByteBuffer; -import java.util.Collection; import java.util.Collections; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; @@ -151,7 +149,7 @@ public OperatorType getType() { */ @Override public String getName() { - return getOperatorName(); + return AppMasterEventOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java index 27ddf13..16675f2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java @@ -20,8 +20,6 @@ import java.io.Serializable; import java.util.ArrayList; -import java.util.Collection; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -94,4 +92,13 @@ public void retrieve(InspectableObject result) { public OperatorType getType() { return null; } + + @Override + public String getName() { + return CollectOperator.getOperatorName(); + } + + public static String getOperatorName() { + return "COLLECT"; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java index f8520f8..117a81e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/CommonJoinOperator.java @@ -21,14 +21,10 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.Future; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -44,6 +40,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Join operator implementation. 
@@ -793,7 +791,7 @@ public void closeOp(boolean abort) throws HiveException { @Override public String getName() { - return getOperatorName(); + return CommonJoinOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java index b897c16..c184742 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DemuxOperator.java @@ -21,14 +21,10 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.concurrent.Future; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -41,6 +37,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hive.common.util.ReflectionUtil; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * DemuxOperator is an operator used by MapReduce Jobs optimized by @@ -374,7 +372,7 @@ public void endGroup() throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return DemuxOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java index 06a3884..2a1be63 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DummyStoreOperator.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -118,4 +116,13 @@ public InspectableObject getResult() { public OperatorType getType() { return OperatorType.FORWARD; } + + @Override + public String getName() { + return DummyStoreOperator.getOperatorName(); + } + + public static String getOperatorName() { + return "DUMMY_STORE"; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java index 08f2633..bd0d28c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; @@ -134,7 +132,7 @@ public void process(Object row, int tag) throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return FilterOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java index 2df7cca..8e516ce 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java @@ -19,8 +19,6 @@ package 
org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -55,7 +53,7 @@ public boolean acceptLimitPushdown() { */ @Override public String getName() { - return getOperatorName(); + return ForwardOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java index 0839b42..e39b75e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java @@ -24,14 +24,12 @@ import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.Future; import javolution.util.FastBitSet; @@ -1131,7 +1129,7 @@ public void closeOp(boolean abort) throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return GroupByOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java index 4749247..0aab7a8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableDummyOperator.java @@ -18,8 +18,6 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -67,7 +65,7 @@ public void closeOp(boolean abort) throws HiveException { @Override public String getName() { - return getOperatorName(); + return HashTableDummyOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java index 4c94ad9..edc400a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewForwardOperator.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.exec; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -44,7 +41,7 @@ public void process(Object row, int tag) throws HiveException { @Override public String getName() { - return getOperatorName(); + return LateralViewForwardOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java index 7407dc6..cf3c5f0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/LateralViewJoinOperator.java @@ -19,9 +19,7 @@ package org.apache.hadoop.hive.ql.exec; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -149,7 +147,7 @@ public void process(Object row, int tag) throws HiveException { @Override public String getName() { - return getOperatorName(); + return LateralViewJoinOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java index 239d56b..9676d70 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java @@ -19,8 +19,6 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -73,7 +71,7 @@ public void process(Object row, int tag) throws HiveException { @Override public String getName() { - return getOperatorName(); + return LimitOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java index 2f2abc1..b081cd0 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java @@ -18,10 +18,8 @@ package org.apache.hadoop.hive.ql.exec; -import java.util.Collection; import java.util.List; import java.util.Properties; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -107,4 +105,13 @@ public void process(Object row, int tag) throws HiveException { public OperatorType getType() { return OperatorType.FORWARD; } + + @Override + public String getName() { + return ListSinkOperator.getOperatorName(); + } + + public static String getOperatorName() { + return "LIST_SINK"; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java index 4608f70..b1f9958 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java @@ -21,7 +21,6 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -31,18 +30,15 @@ import java.util.Properties; import java.util.Set; import java.util.TreeMap; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.CompilationOpContext; -import org.apache.hadoop.hive.ql.exec.MapOperator.MapOpCtx; import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.RecordIdentifier; -import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.plan.MapWork; @@ -59,14 +55,12 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; -import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.StringUtils; @@ -681,7 +675,7 @@ public void process(Object row, int tag) throws HiveException { @Override public String getName() { - return getOperatorName(); + return MapOperator.getOperatorName(); } static public String getOperatorName() { @@ -716,4 +710,5 @@ public void setConnectedOperators(int tag, DummyStoreOperator dummyOp) { public Map getConnectedOperators() { return connectedOperators; } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java index d8444fb..9849243 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MuxOperator.java @@ -21,12 +21,8 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -36,6 +32,8 @@ import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * MuxOperator is used in the Reduce side of MapReduce jobs optimized by Correlation Optimizer. @@ -331,7 +329,7 @@ protected void closeOp(boolean abort) throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return MuxOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index 571620e..91f55f7 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -31,8 +31,6 @@ import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; @@ -51,6 +49,8 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Base operator implementation. 
@@ -894,7 +894,7 @@ public void logStats() { */ @Override public String getName() { - return getOperatorName(); + return Operator.getOperatorName(); } static public String getOperatorName() { @@ -1351,6 +1351,15 @@ public OperatorType getType() { } @Override + public String getName() { + return DummyOperator.getOperatorName(); + } + + public static String getOperatorName() { + return "DUMMY"; + } + + @Override protected void initializeOp(Configuration conf) { } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java index 2e9e539..37ae8fe 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java @@ -19,11 +19,9 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Stack; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -184,7 +182,7 @@ protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return PTFOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java index 2bce5d0..00884cd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java @@ -18,6 +18,24 @@ package org.apache.hadoop.hive.ql.exec; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.TimeUnit; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -41,26 +59,6 @@ import org.apache.spark.SparkEnv; import org.apache.spark.SparkFiles; -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.File; -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; -import java.util.Set; -import java.util.Timer; -import java.util.TimerTask; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; - /** * ScriptOperator. 
* @@ -862,7 +860,7 @@ public void run() { @Override public String getName() { - return getOperatorName(); + return ScriptOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java index e7c23e8..9049ddd 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java @@ -19,9 +19,7 @@ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; @@ -102,7 +100,7 @@ public void process(Object row, int tag) throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return SelectOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java index 5837614..523ff7c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/SparkHashTableSinkOperator.java @@ -21,13 +21,9 @@ import java.io.ObjectOutputStream; import java.io.OutputStream; import java.io.Serializable; -import java.util.Collection; import java.util.Set; -import java.util.concurrent.Future; import org.apache.commons.io.FileExistsException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -44,6 +40,8 @@ import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SparkHashTableSinkOperator extends TerminalOperator implements Serializable { @@ -195,6 +193,10 @@ protected void flushToFile(MapJoinPersistableTableContainer tableContainer, */ @Override public String getName() { + return SparkHashTableSinkOperator.getOperatorName(); + } + + public static String getOperatorName() { return HashTableSinkOperator.getOperatorName(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java index 5f2a0c2..6afe957 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java @@ -277,7 +277,7 @@ public void closeOp(boolean abort) throws HiveException { **/ @Override public String getName() { - return getOperatorName(); + return TableScanOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java index 04d6c9f..aec2f11 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/TerminalOperator.java @@ -38,4 +38,14 @@ protected TerminalOperator() { public TerminalOperator(CompilationOpContext ctx) { super(ctx); } + + @Override + public String getName() { + return getOperatorName(); + } + + static public String getOperatorName() { + return "END"; + } + } diff --git 
ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java index 1dae963..a75b52a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java @@ -20,13 +20,9 @@ import java.io.Serializable; import java.util.Arrays; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -37,6 +33,8 @@ import org.apache.hadoop.hive.ql.udf.generic.UDTFCollector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * UDTFOperator. @@ -137,7 +135,7 @@ public void forwardUDTFOutput(Object o) throws HiveException { @Override public String getName() { - return getOperatorName(); + return UDTFOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java index 3a673e6..39b2776 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/UnionOperator.java @@ -156,7 +156,7 @@ public synchronized void process(Object row, int tag) throws HiveException { */ @Override public String getName() { - return getOperatorName(); + return UnionOperator.getOperatorName(); } static public String getOperatorName() { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java index c591288..1951569 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorAppMasterEventOperator.java @@ -18,22 +18,18 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.AppMasterEventDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; -import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import org.apache.hadoop.io.ObjectWritable; -import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.Writable; +import com.google.common.annotations.VisibleForTesting; + /** * App Master Event operator implementation. **/ @@ -61,7 +57,8 @@ public VectorAppMasterEventOperator( } /** Kryo ctor. 
*/ - protected VectorAppMasterEventOperator() { + @VisibleForTesting + public VectorAppMasterEventOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java index f09534c..a3082c3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; @@ -29,6 +26,8 @@ import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import com.google.common.annotations.VisibleForTesting; + /** * File Sink operator implementation. **/ @@ -56,7 +55,8 @@ public VectorFileSinkOperator(CompilationOpContext ctx, } /** Kryo ctor. */ - protected VectorFileSinkOperator() { + @VisibleForTesting + public VectorFileSinkOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java index 74a0947..261246b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFilterOperator.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -32,6 +29,8 @@ import org.apache.hadoop.hive.ql.plan.FilterDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import com.google.common.annotations.VisibleForTesting; + /** * Filter operator implementation. **/ @@ -57,7 +56,8 @@ public VectorFilterOperator(CompilationOpContext ctx, } /** Kryo ctor. */ - protected VectorFilterOperator() { + @VisibleForTesting + public VectorFilterOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java index 31f5c72..f20f614 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java @@ -22,15 +22,11 @@ import java.lang.management.MemoryMXBean; import java.lang.ref.SoftReference; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.concurrent.Future; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -51,6 +47,10 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.DataOutputBuffer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.annotations.VisibleForTesting; /** * Vectorized GROUP BY operator implementation. 
Consumes the vectorized input and @@ -771,7 +771,8 @@ public VectorGroupByOperator(CompilationOpContext ctx, } /** Kryo ctor. */ - protected VectorGroupByOperator() { + @VisibleForTesting + public VectorGroupByOperator() { super(); } @@ -959,10 +960,6 @@ public void closeOp(boolean aborted) throws HiveException { } } - static public String getOperatorName() { - return "GBY"; - } - public VectorExpression[] getKeyExpressions() { return keyExpressions; } @@ -988,4 +985,14 @@ public VectorizationContext getOuputVectorizationContext() { public OperatorType getType() { return OperatorType.GROUPBY; } + + @Override + public String getName() { + return getOperatorName(); + } + + static public String getOperatorName() { + return "GBY"; + } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java index 154c647..ea00af3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorLimitOperator.java @@ -24,6 +24,8 @@ import org.apache.hadoop.hive.ql.plan.LimitDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; +import com.google.common.annotations.VisibleForTesting; + /** * Limit operator implementation Limits the number of rows to be passed on. **/ @@ -32,7 +34,8 @@ private static final long serialVersionUID = 1L; /** Kryo ctor. */ - protected VectorLimitOperator() { + @VisibleForTesting + public VectorLimitOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java index 622f777..e8f4471 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java @@ -19,13 +19,9 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Map; -import java.util.concurrent.Future; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; @@ -41,6 +37,10 @@ import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.annotations.VisibleForTesting; /** * The vectorized version of the MapJoinOperator. @@ -76,7 +76,8 @@ protected transient Object[] singleRow; /** Kryo ctor. 
*/ - protected VectorMapJoinOperator() { + @VisibleForTesting + public VectorMapJoinOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOuterFilteredOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOuterFilteredOperator.java index 509a43f..0fe1188 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOuterFilteredOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOuterFilteredOperator.java @@ -18,15 +18,14 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import com.google.common.annotations.VisibleForTesting; + /** * This is the *NON-NATIVE* vector map join operator for just LEFT OUTER JOIN and filtered. * @@ -50,7 +49,8 @@ protected transient Object[] singleRow; /** Kryo ctor. */ - protected VectorMapJoinOuterFilteredOperator() { + @VisibleForTesting + public VectorMapJoinOuterFilteredOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java index 033be38..9f0c24e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java @@ -24,12 +24,15 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.io.Writable; +import com.google.common.annotations.VisibleForTesting; + public class VectorMapOperator extends MapOperator { private static final long serialVersionUID = 1L; /** Kryo ctor. */ - protected VectorMapOperator() { + @VisibleForTesting + public VectorMapOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java index b79a3d8..74e5130 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorReduceSinkOperator.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; @@ -29,6 +26,8 @@ import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import com.google.common.annotations.VisibleForTesting; + public class VectorReduceSinkOperator extends ReduceSinkOperator { private static final long serialVersionUID = 1L; @@ -54,7 +53,8 @@ public VectorReduceSinkOperator(CompilationOpContext ctx, } /** Kryo ctor. 
*/ - protected VectorReduceSinkOperator() { + @VisibleForTesting + public VectorReduceSinkOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java index 9a263e6..85c8506 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java @@ -19,14 +19,10 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.Future; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator; @@ -40,8 +36,12 @@ import org.apache.hadoop.hive.ql.plan.SMBJoinDesc; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; -import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; +import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.annotations.VisibleForTesting; /** * VectorSMBJoinOperator. @@ -91,7 +91,8 @@ } /** Kryo ctor. */ - protected VectorSMBMapJoinOperator() { + @VisibleForTesting + public VectorSMBMapJoinOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java index 8db6eba..f7fec8f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSelectOperator.java @@ -19,9 +19,7 @@ package org.apache.hadoop.hive.ql.exec.vector; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.concurrent.Future; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; @@ -37,6 +35,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; +import com.google.common.annotations.VisibleForTesting; + /** * Select operator implementation. */ @@ -82,7 +82,8 @@ public VectorSelectOperator(CompilationOpContext ctx, } /** Kryo ctor. 
*/ - protected VectorSelectOperator() { + @VisibleForTesting + public VectorSelectOperator() { super(); } @@ -147,10 +148,6 @@ public void process(Object row, int tag) throws HiveException { vrg.projectedColumns = originalProjections; } - static public String getOperatorName() { - return "SEL"; - } - public VectorExpression[] getvExpressions() { return vExpressions; } @@ -176,4 +173,14 @@ public VectorizationContext getOuputVectorizationContext() { public OperatorType getType() { return OperatorType.SELECT; } + + @Override + public String getName() { + return getOperatorName(); + } + + static public String getOperatorName() { + return "SEL"; + } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkHashTableSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkHashTableSinkOperator.java index 1e550e7..3d0b571 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkHashTableSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkHashTableSinkOperator.java @@ -26,8 +26,7 @@ import org.apache.hadoop.hive.ql.plan.SparkHashTableSinkDesc; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; -import java.util.Collection; -import java.util.concurrent.Future; +import com.google.common.annotations.VisibleForTesting; /** * Vectorized version of SparkHashTableSinkOperator @@ -52,7 +51,8 @@ protected transient Object[] singleRow; /** Kryo ctor. */ - protected VectorSparkHashTableSinkOperator() { + @VisibleForTesting + public VectorSparkHashTableSinkOperator() { super(); } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkPartitionPruningSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkPartitionPruningSinkOperator.java index 2f02250..e7ac531 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkPartitionPruningSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSparkPartitionPruningSinkOperator.java @@ -18,9 +18,6 @@ package org.apache.hadoop.hive.ql.exec.vector; -import java.util.Collection; -import java.util.concurrent.Future; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -31,6 +28,8 @@ import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.io.Writable; +import com.google.common.annotations.VisibleForTesting; + /** * Vectorized version for SparkPartitionPruningSinkOperator. * Forked from VectorAppMasterEventOperator. @@ -55,7 +54,8 @@ public VectorSparkPartitionPruningSinkOperator(CompilationOpContext ctx, } /** Kryo ctor. 
*/
-  protected VectorSparkPartitionPruningSinkOperator() {
+  @VisibleForTesting
+  public VectorSparkPartitionPruningSinkOperator() {
     super();
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
index 3f31fb5..dd8ff01 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkPartitionPruningSinkOperator.java
@@ -21,11 +21,7 @@ import java.io.BufferedOutputStream;
 import java.io.IOException;
 import java.io.ObjectOutputStream;
-import java.util.Collection;
-import java.util.concurrent.Future;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -36,11 +32,15 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.optimizer.spark.SparkPartitionPruningSinkDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.hive.serde2.Serializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * This operator gets partition info from the upstream operators, and write them
@@ -55,7 +55,8 @@ protected static final Logger LOG = LoggerFactory.getLogger(SparkPartitionPruningSinkOperator.class);
 
   /** Kryo ctor. */
-  protected SparkPartitionPruningSinkOperator() {
+  @VisibleForTesting
+  public SparkPartitionPruningSinkOperator() {
     super();
   }
@@ -63,6 +64,7 @@ public SparkPartitionPruningSinkOperator(CompilationOpContext ctx) {
     super(ctx);
   }
 
+  @Override
   @SuppressWarnings("deprecation")
   public void initializeOp(Configuration hconf) throws HiveException {
     super.initializeOp(hconf);
@@ -141,7 +143,7 @@ public OperatorType getType() {
 
   @Override
   public String getName() {
-    return getOperatorName();
+    return SparkPartitionPruningSinkOperator.getOperatorName();
   }
 
   public static String getOperatorName() {
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java
new file mode 100644
index 0000000..e936380
--- /dev/null
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperatorNames.java
@@ -0,0 +1,98 @@
+package org.apache.hadoop.hive.ql.exec;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.ql.exec.vector.VectorAppMasterEventOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorFilterOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorGroupByOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorLimitOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorMapOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSMBMapJoinOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSparkHashTableSinkOperator;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSparkPartitionPruningSinkOperator;
+import org.apache.hadoop.hive.ql.parse.spark.SparkPartitionPruningSinkOperator;
+import org.junit.Test;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+public class TestOperatorNames extends TestCase {
+
+  public TestOperatorNames(String name) {
+    super(name);
+  }
+
+  @Override
+  protected void setUp() throws Exception {
+    super.setUp();
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+  /*
+   * If there's a mismatch between static and object name, or a mismatch between
+   * vector and non-vector operator name, the optimizer doesn't work correctly.
+   */
+  @Test
+  public void testOperatorNames() throws Exception {
+
+    assertEquals(SelectOperator.getOperatorName(), new SelectOperator().getName());
+    assertEquals(SelectOperator.getOperatorName(), new VectorSelectOperator().getName());
+
+    assertEquals(GroupByOperator.getOperatorName(), new GroupByOperator().getName());
+    assertEquals(GroupByOperator.getOperatorName(), new VectorGroupByOperator().getName());
+
+    assertEquals(FilterOperator.getOperatorName(), new FilterOperator().getName());
+    assertEquals(FilterOperator.getOperatorName(), new VectorFilterOperator().getName());
+
+    assertEquals(LimitOperator.getOperatorName(), new LimitOperator().getName());
+    assertEquals(LimitOperator.getOperatorName(), new VectorLimitOperator().getName());
+
+    assertEquals(MapOperator.getOperatorName(), new MapOperator().getName());
+    assertEquals(MapOperator.getOperatorName(), new VectorMapOperator().getName());
+
+    assertEquals(MapJoinOperator.getOperatorName(), new MapJoinOperator().getName());
+    assertEquals(MapJoinOperator.getOperatorName(), new VectorMapJoinOperator().getName());
+
+    assertEquals(AppMasterEventOperator.getOperatorName(), new AppMasterEventOperator().getName());
+    assertEquals(AppMasterEventOperator.getOperatorName(),
+        new VectorAppMasterEventOperator().getName());
+
+    assertEquals(SMBMapJoinOperator.getOperatorName(), new SMBMapJoinOperator().getName());
+    assertEquals(SMBMapJoinOperator.getOperatorName(), new VectorSMBMapJoinOperator().getName());
+
+    assertEquals(MapJoinOperator.getOperatorName(),
+        new VectorMapJoinOuterFilteredOperator().getName());
+
+    assertEquals(SparkHashTableSinkOperator.getOperatorName(),
+        new SparkHashTableSinkOperator().getName());
+    assertEquals(SparkHashTableSinkOperator.getOperatorName(),
+        new VectorSparkHashTableSinkOperator().getName());
+
+    assertEquals(SparkPartitionPruningSinkOperator.getOperatorName(),
+        new SparkPartitionPruningSinkOperator().getName());
+    assertEquals(SparkPartitionPruningSinkOperator.getOperatorName(),
+        new VectorSparkPartitionPruningSinkOperator().getName());
+
+  }
+
+}
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
index 74e077b..bdf911c 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeCaptureOutputOperator.java
@@ -20,9 +20,7 @@ import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
-import java.util.concurrent.Future;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -102,4 +100,12 @@ public OperatorType getType() {
     return null;
   }
 
+  @Override
+  public String getName() {
+    return FakeCaptureOutputOperator.getOperatorName();
+  }
+
+  public static String getOperatorName() {
+    return "FAKE_CAPTURE";
+  }
 }
diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
index d06d214..a2032bf 100644
--- ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/FakeVectorDataSourceOperator.java
@@ -20,9 +20,7 @@ import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
-import java.util.concurrent.Future;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
@@ -89,4 +87,13 @@ public void process(Object row, int tag) throws HiveException {
   public OperatorType getType() {
     return null;
   }
+
+  @Override
+  public String getName() {
+    return FakeVectorDataSourceOperator.getOperatorName();
+  }
+
+  public static String getOperatorName() {
+    return "FAKE_VECTOR_DS";
+  }
 }
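
For reference, the pattern this patch applies to every operator is: the instance getName() delegates to a static getOperatorName(), and each vectorized operator reports the same name as its row-mode counterpart, since optimizer rules match operators by these name strings and a mismatch keeps a rule from firing once vectorization swaps in the vector class (this is what the comment in TestOperatorNames warns about). A minimal sketch of the pattern follows; ExampleOperator, VectorExampleOperator, and the tag "EXAMPLE" are illustrative placeholders, not classes or names from this patch.

// Illustrative sketch of the naming pattern only; ExampleOperator, VectorExampleOperator
// and the "EXAMPLE" tag are placeholders, not classes or names taken from this patch.
class ExampleOperator {
  /** Static name that name-based optimizer rules can be built from. */
  public static String getOperatorName() {
    return "EXAMPLE";
  }

  /** The instance name delegates to the static name, so the two cannot drift apart. */
  public String getName() {
    return ExampleOperator.getOperatorName();
  }
}

class VectorExampleOperator extends ExampleOperator {
  /** The vectorized variant reuses the row-mode name, so name-based rules keep matching. */
  @Override
  public String getName() {
    return ExampleOperator.getOperatorName();
  }
}

class ExampleOperatorNameCheck {
  /** Same style of check as TestOperatorNames above, applied to the placeholder classes. */
  public static void main(String[] args) {
    if (!ExampleOperator.getOperatorName().equals(new ExampleOperator().getName())
        || !ExampleOperator.getOperatorName().equals(new VectorExampleOperator().getName())) {
      throw new AssertionError("static and instance operator names must match");
    }
    System.out.println("operator names are consistent");
  }
}

The widening of the Kryo constructors from protected to public with @VisibleForTesting serves the same goal: TestOperatorNames creates each operator through its no-argument constructor in order to compare the instance name with the static one.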