diff --git a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index d9b7031..3009358 100644 --- a/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ b/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -52,9 +52,9 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.VariableSubstitution; import org.apache.hadoop.hive.ql.processors.CommandProcessor; diff --git a/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out b/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out index 7b3ea29..e0e4258 100644 --- a/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out +++ b/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out @@ -25,4 +25,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java index a5a867a..5350212 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -46,7 +46,6 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Schema; import org.apache.hadoop.hive.ql.exec.ConditionalTask; -import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; @@ -193,7 +192,7 @@ public void init() { public ClusterStatus getClusterStatus() throws Exception { ClusterStatus cs; try { - JobConf job = new JobConf(conf, ExecDriver.class); + JobConf job = new JobConf(conf); JobClient jc = new JobClient(job); cs = jc.getClusterStatus(); } catch (Exception e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java index 23cdfc2..1b3a226 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java @@ -38,10 +38,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.ConditionalTask; -import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; import org.apache.hadoop.hive.ql.hooks.LineageInfo; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java index 6935738..9de7dcf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java @@ -74,7 +74,7 @@ public 
void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) { super.initialize(conf, queryPlan, ctx); work.initializeForFetch(); try { - JobConf job = new JobConf(conf, ExecDriver.class); + JobConf job = new JobConf(conf); ftOp = new FetchOperator(work.getfWork(), job); } catch (Exception e) { LOG.error(StringUtils.stringifyException(e)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java index b4da80c..e4d5448 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeGenericFuncEvaluator.java @@ -120,7 +120,7 @@ public ObjectInspector initialize(ObjectInspector rowInspector) throws HiveExcep for (int i = 0; i < children.length; i++) { childrenOIs[i] = children[i].initialize(rowInspector); } - MapredContext context = MapredContext.get(); + UDFContext context = UDFContext.get(); if (context != null) { context.setup(genericUDF); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java index beb04fc..b28d16e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java @@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; import org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader; import org.apache.hadoop.hive.ql.io.HiveInputFormat; import org.apache.hadoop.hive.ql.io.HiveRecordReader; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java index 6e9e0a8..df2ccf1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java @@ -65,7 +65,7 @@ public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) { try { // Create a file system handle - JobConf job = new JobConf(conf, ExecDriver.class); + JobConf job = new JobConf(conf); Operator source = work.getSource(); if (source instanceof TableScanOperator) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java index 0bf6add..ab197f0 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java @@ -338,7 +338,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { jc = (JobConf) hconf; } else { // test code path - jc = new JobConf(hconf, ExecDriver.class); + jc = new JobConf(hconf); } if (multiFileSpray) { @@ -808,7 +808,7 @@ protected void fatalErrorMessage(StringBuilder errMsg, long counterCode) { private String lsDir() { String specPath = conf.getDirName(); // need to get a JobConf here because it's not passed through at client side - JobConf jobConf = new JobConf(ExecDriver.class); + JobConf jobConf = new JobConf(); Path tmpPath = Utilities.toTempPath(specPath); StringBuilder sb = new StringBuilder("\n"); try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java index 532a49b..807518a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java +++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java @@ -339,7 +339,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { for (ExprNodeEvaluator keyField : keyFields) { objectInspectors.add(null); } - MapredContext context = MapredContext.get(); + UDFContext context = UDFContext.get(); if (context != null) { for (GenericUDAFEvaluator genericUDAFEvaluator : aggregationEvaluators) { context.setup(genericUDAFEvaluator); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java index 8695ee1..cf8bd9d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java @@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.plan.MapredWork; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java index ac8e167..a271279 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java @@ -40,6 +40,8 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; +import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask; import org.apache.hadoop.hive.ql.hooks.LineageInfo.DataContainer; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java index 564e166..95f1e2c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java @@ -31,6 +31,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.ql.exec.mr.ExecMapperContext; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.parse.SemanticException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java index 72927b5..6ee13ec 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java @@ -200,7 +200,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { jc = (JobConf) hconf; } else { // test code path - jc = new JobConf(hconf, ExecDriver.class); + jc = new JobConf(hconf); } currentStat = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java index 17387a9..7529890 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java @@ -31,7 +31,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.CommandNeedRetryException; -import org.apache.hadoop.hive.ql.Context; 
import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.lib.Node; @@ -50,6 +49,8 @@ public abstract class Task implements Serializable, Node { private static final long serialVersionUID = 1L; + public transient HashMap taskCounters; + public transient TaskHandle taskHandle; protected transient boolean started; protected transient boolean initialized; protected transient boolean isdone; @@ -58,8 +59,6 @@ protected transient Hive db; protected transient LogHelper console; protected transient QueryPlan queryPlan; - protected transient TaskHandle taskHandle; - protected transient HashMap taskCounters; protected transient DriverContext driverContext; protected transient boolean clonedConf = false; protected transient String jobID; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java index 368f677..d0807d2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java @@ -23,6 +23,8 @@ import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; +import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask; import org.apache.hadoop.hive.ql.io.rcfile.merge.BlockMergeTask; import org.apache.hadoop.hive.ql.io.rcfile.merge.MergeWork; import org.apache.hadoop.hive.ql.io.rcfile.stats.PartialScanTask; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFContext.java similarity index 69% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/UDFContext.java index 0c8a6f2..f444e02 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDFContext.java @@ -29,77 +29,44 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; -import org.apache.hadoop.mapred.JobConf; -import org.apache.hadoop.mapred.Reporter; /** * Runtime context of MapredTask providing additional information to GenericUDF */ -public class MapredContext { +public class UDFContext { - private static final Log logger = LogFactory.getLog("MapredContext"); - private static final ThreadLocal contexts = new ThreadLocal(); + private static final Log logger = LogFactory.getLog(UDFContext.class); + private static final ThreadLocal contexts = new ThreadLocal(); - static MapredContext get() { + public static UDFContext get() { return contexts.get(); } - static MapredContext init(boolean isMap, JobConf jobConf) { - MapredContext context = new MapredContext(isMap, jobConf); + public static UDFContext init() { + UDFContext context = new UDFContext(); contexts.set(context); return context; } - static void close() { - MapredContext context = contexts.get(); + public static void close() { + UDFContext context = contexts.get(); if (context != null) { context.closeAll(); } contexts.remove(); } - private final boolean isMap; - private final JobConf jobConf; private final List udfs; - private Reporter reporter; - - private MapredContext(boolean isMap, JobConf jobConf) { - this.isMap = isMap; - this.jobConf = jobConf; + private UDFContext() { this.udfs = new ArrayList(); } - /** - * Returns whether the UDF is called from Map or Reduce task. 
- */ - public boolean isMap() { - return isMap; - } - - /** - * Returns Reporter, which is set right before reading the first row. - */ - public Reporter getReporter() { - return reporter; - } - - /** - * Returns JobConf. - */ - public JobConf getJobConf() { - return jobConf; - } - - void setReporter(Reporter reporter) { - this.reporter = reporter; - } - private void registerCloseable(Closeable closeable) { udfs.add(closeable); } - private void closeAll() { + public void closeAll() { for (Closeable eval : udfs) { try { eval.close(); @@ -110,7 +77,7 @@ private void closeAll() { udfs.clear(); } - void setup(GenericUDF genericUDF) { + public void setup(GenericUDF genericUDF) { if (needConfigure(genericUDF)) { genericUDF.configure(this); } @@ -119,7 +86,7 @@ void setup(GenericUDF genericUDF) { } } - void setup(GenericUDAFEvaluator genericUDAF) { + public void setup(GenericUDAFEvaluator genericUDAF) { if (needConfigure(genericUDAF)) { genericUDAF.configure(this); } @@ -128,7 +95,7 @@ void setup(GenericUDAFEvaluator genericUDAF) { } } - void setup(GenericUDTF genericUDTF) { + public void setup(GenericUDTF genericUDTF) { if (needConfigure(genericUDTF)) { genericUDTF.configure(this); } @@ -137,7 +104,7 @@ void setup(GenericUDTF genericUDTF) { private boolean needConfigure(Object func) { try { - Method initMethod = func.getClass().getMethod("configure", MapredContext.class); + Method initMethod = func.getClass().getMethod("configure", UDFContext.class); return initMethod.getDeclaringClass() != GenericUDF.class && initMethod.getDeclaringClass() != GenericUDAFEvaluator.class && initMethod.getDeclaringClass() != GenericUDTF.class; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java index 3d26a7c..4c01b87 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/UDTFOperator.java @@ -74,7 +74,7 @@ protected void initializeOp(Configuration hconf) throws HiveException { } objToSendToUDTF = new Object[inputFields.size()]; - MapredContext context = MapredContext.get(); + UDFContext context = UDFContext.get(); if (context != null) { context.setup(genericUDTF); } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 68ec54a..b789d78 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -97,6 +97,8 @@ import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat; @@ -1404,7 +1406,7 @@ private static void createEmptyBuckets(Configuration hconf, ArrayList pa jc = new JobConf(hconf); } else { // test code path - jc = new JobConf(hconf, ExecDriver.class); + jc = new JobConf(hconf); } HiveOutputFormat hiveOutputFormat = null; Class outputClass = null; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java rename to 
ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java index 14304ca..9fbabae 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.File; import java.io.IOException; @@ -54,6 +54,15 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.exec.FetchOperator; +import org.apache.hadoop.hive.ql.exec.FileSinkOperator; +import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner; +import org.apache.hadoop.hive.ql.exec.JobCloseFeedBack; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.PartitionKeySampler; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat; import org.apache.hadoop.hive.ql.io.HiveKey; @@ -93,7 +102,12 @@ import org.apache.log4j.varia.NullAppender; /** - * ExecDriver. + * ExecDriver is the central class for coordinating the execution of any map-reduce task. + * Its main responsibilities are: + * + * - Converting the plan (MapredWork) into an MR job (JobConf) + * - Submitting the MR job to the cluster via JobClient and HadoopJobExecHelper + * - Executing the MR job in local execution mode (where applicable) * */ public class ExecDriver extends Task implements Serializable, HadoopJobExecHook { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java similarity index 89% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java index 08ffcb1..d48a47f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapper.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -28,6 +28,11 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.exec.FetchOperator; +import org.apache.hadoop.hive.ql.exec.MapOperator; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.UDFContext; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.plan.MapredLocalWork; import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.OperatorDesc; @@ -38,8 +43,16 @@ import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.StringUtils; + /** - * ExecMapper. + * ExecMapper is the generic Map class for Hive. Together with ExecReducer it is + * the bridge between the map-reduce framework and the Hive operator pipeline at + * execution time.
Its main responsibilities are: + * + * - Load and set up the operator pipeline from XML + * - Run the pipeline by transforming key-value pairs into records and forwarding them to the operators + * - Stop execution when the "limit" is reached + * - Catch and handle errors during execution of the operators. * */ public class ExecMapper extends MapReduceBase implements Mapper { @@ -50,7 +63,7 @@ private JobConf jc; private boolean abort = false; private Reporter rp; - public static final Log l4j = LogFactory.getLog("ExecMapper"); + public static final Log l4j = LogFactory.getLog(ExecMapper.class); private static boolean done; // used to log memory usage periodically @@ -93,7 +106,7 @@ public void configure(JobConf job) { localWork = mrwork.getMapLocalWork(); execContext.setLocalWork(localWork); - MapredContext.init(true, new JobConf(jc)); + UDFContext.init(); mo.setExecContext(execContext); mo.initializeLocalWork(jc); @@ -130,7 +143,6 @@ public void map(Object key, Object value, OutputCollector output, rp = reporter; mo.setOutputCollector(oc); mo.setReporter(rp); - MapredContext.get().setReporter(reporter); } // reset the execContext for each new row execContext.resetRow(); @@ -227,7 +239,7 @@ public void close() { throw new RuntimeException("Hive Runtime Error while closing operators", e); } } finally { - MapredContext.close(); + UDFContext.close(); } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java index 1a4c4ef..0191aef 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapperContext.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecMapperContext.java @@ -15,11 +15,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.util.Map; import org.apache.commons.logging.Log; +import org.apache.hadoop.hive.ql.exec.FetchOperator; import org.apache.hadoop.hive.ql.io.IOContext; import org.apache.hadoop.hive.ql.plan.MapredLocalWork; import org.apache.hadoop.mapred.JobConf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java similarity index 93% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java index 89d9643..0cbedfc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecReducer.java @@ -16,7 +16,7 @@ * limitations under the License.
*/ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; import java.lang.management.ManagementFactory; @@ -25,11 +25,13 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; -import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.ql.exec.ExecMapper.reportStats; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.UDFContext; +import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.ExecMapper.reportStats; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.MapredWork; import org.apache.hadoop.hive.ql.plan.TableDesc; @@ -51,7 +53,14 @@ import org.apache.hadoop.util.StringUtils; /** - * ExecReducer. + * ExecReducer is the generic Reducer class for Hive. Together with ExecMapper it is + * the bridge between the map-reduce framework and the Hive operator pipeline at + * execution time. Its main responsibilities are: + * + * - Load and set up the operator pipeline from XML + * - Run the pipeline by transforming key-value pairs into records and forwarding them to the operators + * - Send start and end group messages to separate records with the same key from one another + * - Catch and handle errors during execution of the operators. * */ public class ExecReducer extends MapReduceBase implements Reducer { @@ -145,7 +154,7 @@ public void configure(JobConf job) { throw new RuntimeException(e); } - MapredContext.init(false, new JobConf(jc)); + UDFContext.init(); // initialize reduce operator tree try { @@ -181,7 +190,6 @@ public void reduce(Object key, Iterator values, OutputCollector output, rp = reporter; reducer.setOutputCollector(oc); reducer.setReporter(rp); - MapredContext.get().setReporter(reporter); } try { @@ -318,7 +326,7 @@ public void close() { + e.getMessage(), e); } } finally { - MapredContext.close(); + UDFContext.close(); } } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java similarity index 99% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java index effc540..cd872b2 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java @@ -16,7 +16,7 @@ * limitations under the License.
*/ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; import java.io.Serializable; @@ -35,6 +35,9 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.MapRedStats; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.TaskHandle; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.Operator.ProgressCounter; import org.apache.hadoop.hive.ql.history.HiveHistory.Keys; import org.apache.hadoop.hive.ql.plan.ReducerTimeStatsPerJob; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java similarity index 96% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java index b8bc5e4..02ff977 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHook.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHook.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java similarity index 99% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java index 984a37f..7b77944 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; import java.lang.Exception; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java similarity index 97% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java index 63c29cd..f256013 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobTrackerURLResolver.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; import java.net.InetSocketAddress; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java similarity index 99% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java index b2127fc..9676e7e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.File; import java.io.IOException; @@ -36,6 +36,8 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.MapredWork; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java similarity index 95% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java index 6bbcb26..f72ecfb 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapredLocalTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapredLocalTask.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.File; import java.io.IOException; @@ -43,6 +43,13 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; +import org.apache.hadoop.hive.ql.exec.BucketMatcher; +import org.apache.hadoop.hive.ql.exec.FetchOperator; +import org.apache.hadoop.hive.ql.exec.HashTableSinkOperator; +import org.apache.hadoop.hive.ql.exec.Operator; +import org.apache.hadoop.hive.ql.exec.TableScanOperator; +import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter; import org.apache.hadoop.hive.ql.exec.persistence.AbstractMapJoinKey; import org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper; @@ -63,6 +70,15 @@ import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.ReflectionUtils; + +/** + * MapredLocalTask represents any local work (i.e. client-side work) that Hive needs to + * execute. For example, this is used for generating the hash tables for map joins on the client + * before the join is executed on the cluster. + * + * MapredLocalTask does not actually execute the work in-process, but rather generates + * a command using ExecDriver. ExecDriver is what ultimately drives the processing of the records.
+ */ public class MapredLocalTask extends Task implements Serializable { private Map fetchOperators; @@ -202,7 +218,7 @@ public int execute(DriverContext driverContext) { // This will be used by hadoop only in unsecure(/non kerberos) mode HadoopShims shim = ShimLoader.getHadoopShims(); String endUserName = shim.getShortUserName(shim.getUGIForConf(job)); - console.printInfo("setting HADOOP_USER_NAME\t" + endUserName); + LOG.debug("setting HADOOP_USER_NAME\t" + endUserName); variables.put("HADOOP_USER_NAME", endUserName); if (variables.containsKey(HADOOP_OPTS_KEY)) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java similarity index 89% rename from ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java rename to ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java index b240125..f582c39 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Throttle.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/Throttle.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.exec; +package org.apache.hadoop.hive.ql.exec.mr; import java.io.IOException; import java.io.InputStream; @@ -76,18 +76,18 @@ public static void checkJobTracker(JobConf conf, Log LOG) { // fetch the xml tag xxx Pattern dowait = Pattern.compile("", Pattern.CASE_INSENSITIVE - | Pattern.DOTALL | Pattern.MULTILINE); + | Pattern.DOTALL | Pattern.MULTILINE); String[] results = dowait.split(fetchString); if (results.length != 2) { throw new IOException("Throttle: Unable to parse response of URL " - + url + ". Get retuned " + fetchString); + + url + ". Get retuned " + fetchString); } dowait = Pattern.compile("", Pattern.CASE_INSENSITIVE - | Pattern.DOTALL | Pattern.MULTILINE); + | Pattern.DOTALL | Pattern.MULTILINE); results = dowait.split(results[1]); if (results.length < 1) { throw new IOException("Throttle: Unable to parse response of URL " - + url + ". Get retuned " + fetchString); + + url + ". Get retuned " + fetchString); } // if the jobtracker signalled that the threshold is not exceeded, @@ -99,7 +99,7 @@ public static void checkJobTracker(JobConf conf, Log LOG) { // The JobTracker has exceeded its threshold and is doing a GC. // The client has to wait and retry. LOG.warn("Job is being throttled because of resource crunch on the " - + "JobTracker. Will retry in " + retry + " seconds.."); + + "JobTracker. 
Will retry in " + retry + " seconds.."); Thread.sleep(retry * 1000L); } } catch (Exception e) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java index e81ad9c..481deba 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveInputSplit.java @@ -37,7 +37,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; -import org.apache.hadoop.hive.ql.exec.ExecMapper; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Utilities; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java index 9ea812b..6318b2f 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java @@ -21,7 +21,7 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.ql.exec.ExecMapper; +import org.apache.hadoop.hive.ql.exec.mr.ExecMapper; import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.CombineHiveInputSplit; import org.apache.hadoop.hive.shims.HadoopShims.InputSplitShim; import org.apache.hadoop.io.Writable; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java index 01286ef..049aee4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveRecordReader.java @@ -20,7 +20,7 @@ import java.io.IOException; -import org.apache.hadoop.hive.ql.exec.ExecMapper; +import org.apache.hadoop.hive.ql.exec.mr.ExecMapper; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.JobConf; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java index cbee423..ad14966 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/BlockMergeTask.java @@ -35,11 +35,11 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHook; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.Throttle; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook; +import org.apache.hadoop.hive.ql.exec.mr.Throttle; import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl; import org.apache.hadoop.hive.ql.plan.api.StageType; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java index a1abf90..f66b82e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java 
@@ -36,11 +36,11 @@ import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHook; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.Throttle; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook; +import org.apache.hadoop.hive.ql.exec.mr.Throttle; import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl; import org.apache.hadoop.hive.ql.metadata.HiveException; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java index a9cd8ac..6beb54d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java @@ -28,11 +28,11 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper; -import org.apache.hadoop.hive.ql.exec.HadoopJobExecHook; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.Throttle; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper; +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHook; +import org.apache.hadoop.hive.ql.exec.mr.Throttle; import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat; import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl; import org.apache.hadoop.hive.ql.plan.api.StageType; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java index 4acf476..01fbca5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java @@ -33,7 +33,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.ConditionalTask; -import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; @@ -43,6 +42,7 @@ import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.UnionOperator; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java index ff687c9..c876ab7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/AbstractJoinTaskDispatcher.java @@ -27,9 +27,9 @@ import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.hive.ql.Context; import 
org.apache.hadoop.hive.ql.exec.ConditionalTask; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.TaskGraphWalker.TaskGraphWalkerContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java index b6e92b6..35dfdc5 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/BucketingSortingInferenceOptimizer.java @@ -23,7 +23,6 @@ import java.util.List; import java.util.Map; -import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.ql.exec.ExtractOperator; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -36,6 +35,7 @@ import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.lib.GraphWalker; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java index da9423e..6d78e33 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CommonJoinTaskDispatcher.java @@ -34,12 +34,12 @@ import org.apache.hadoop.hive.ql.exec.ConditionalTask; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils; import org.apache.hadoop.hive.ql.optimizer.MapJoinProcessor; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java index cb07728..efaeccf 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MapJoinResolver.java @@ -28,12 +28,12 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.exec.ConditionalTask; -import org.apache.hadoop.hive.ql.exec.MapredLocalTask; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; import 
org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java index a5afbb9..c0c232e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SamplingOptimizer.java @@ -20,12 +20,12 @@ package org.apache.hadoop.hive.ql.optimizer.physical; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorUtils; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.MapredWork; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java index 8e1bafe..af56857 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java @@ -35,13 +35,13 @@ import org.apache.hadoop.hive.ql.exec.DummyStoreOperator; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator; import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.lib.Dispatcher; import org.apache.hadoop.hive.ql.optimizer.MapJoinProcessor; import org.apache.hadoop.hive.ql.parse.OpParseContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java index 02b126f..8bce7c3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java @@ -31,10 +31,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.hive.metastore.api.Index; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; import org.apache.hadoop.hive.ql.index.HiveIndexQueryContext; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java index 94607d1..a8d1a68 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java @@ -40,7 +40,6 @@ import 
org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.ql.exec.FilterOperator; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.Utilities; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index 8cf9dff..0bad4be 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -59,7 +59,6 @@ import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.ColumnStatsTask; import org.apache.hadoop.hive.ql.exec.ConditionalTask; -import org.apache.hadoop.hive.ql.exec.ExecDriver; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.FunctionInfo; @@ -67,7 +66,6 @@ import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.JoinOperator; import org.apache.hadoop.hive.ql.exec.MapJoinOperator; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.RecordReader; @@ -83,6 +81,8 @@ import org.apache.hadoop.hive.ql.exec.UnionOperator; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.exec.WindowFunctionInfo; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java index 5668a3b..7ca96e1 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java @@ -21,7 +21,7 @@ import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.hive.ql.exec.MapredContext; +import org.apache.hadoop.hive.ql.exec.UDFContext; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.UDFType; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -90,12 +90,12 @@ public GenericUDAFEvaluator() { } /** - * Additionally setup GenericUDAFEvaluator with MapredContext before initializing. + * Additionally setup GenericUDAFEvaluator with UDFContext before initializing. * This is only called in runtime of MapRedTask. 
* * @param context context */ - public void configure(MapredContext mapredContext) { + public void configure(UDFContext mapredContext) { } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java index 3d1f55e..e6aa032 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java @@ -21,8 +21,8 @@ import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.hive.ql.exec.MapredContext; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; +import org.apache.hadoop.hive.ql.exec.UDFContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.UDFType; @@ -95,12 +95,12 @@ public abstract ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException; /** - * Additionally setup GenericUDF with MapredContext before initializing. + * Additionally setup GenericUDF with UDFContext before initializing. * This is only called in runtime of MapRedTask. * * @param context context */ - public void configure(MapredContext context) { + public void configure(UDFContext context) { } /** diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java index 9956f4e..81ab0db 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java @@ -18,7 +18,7 @@ package org.apache.hadoop.hive.ql.udf.generic; -import org.apache.hadoop.hive.ql.exec.MapredContext; +import org.apache.hadoop.hive.ql.exec.UDFContext; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -35,12 +35,12 @@ Collector collector = null; /** - * Additionally setup GenericUDTF with MapredContext before initializing. + * Additionally setup GenericUDTF with UDFContext before initializing. * This is only called in runtime of MapRedTask. 
* * @param context context */ - public void configure(MapredContext mapredContext) { + public void configure(UDFContext mapredContext) { } /** diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index 3779bad..6a74ae4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -31,6 +31,8 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.ql.DriverContext; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.Table; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java index bc44af5..ad231c5 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java @@ -35,9 +35,9 @@ import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.exec.ExecDriver; -import org.apache.hadoop.hive.ql.exec.MapRedTask; import org.apache.hadoop.hive.ql.exec.Utilities; +import org.apache.hadoop.hive.ql.exec.mr.ExecDriver; +import org.apache.hadoop.hive.ql.exec.mr.MapRedTask; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java deleted file mode 100644 index c521b10..0000000 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/DummyContextUDF.java +++ /dev/null @@ -1,55 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.hive.ql.udf.generic; - -import org.apache.hadoop.hive.ql.exec.MapredContext; -import org.apache.hadoop.hive.ql.exec.UDFArgumentException; -import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; -import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; -import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.mapred.Counters; -import org.apache.hadoop.mapred.Reporter; - -public class DummyContextUDF extends GenericUDF { - - private MapredContext context; - private LongWritable result = new LongWritable(); - - public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { - return PrimitiveObjectInspectorFactory.writableLongObjectInspector; - } - - public Object evaluate(DeferredObject[] arguments) throws HiveException { - Reporter reporter = context.getReporter(); - Counters.Counter counter = reporter.getCounter( - "org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS"); - result.set(counter.getValue()); - return result; - } - - public String getDisplayString(String[] children) { - return "dummy-func()"; - } - - @Override - public void configure(MapredContext context) { - this.context = context; - } -} diff --git a/ql/src/test/results/clientnegative/autolocal1.q.out b/ql/src/test/results/clientnegative/autolocal1.q.out index 4f12bf8..d9d5ed0 100644 --- a/ql/src/test/results/clientnegative/autolocal1.q.out +++ b/ql/src/test/results/clientnegative/autolocal1.q.out @@ -12,4 +12,4 @@ SELECT key FROM src PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/cachingprintstream.q.out b/ql/src/test/results/clientnegative/cachingprintstream.q.out index ed97ea9..d231136 100644 --- a/ql/src/test/results/clientnegative/cachingprintstream.q.out +++ b/ql/src/test/results/clientnegative/cachingprintstream.q.out @@ -34,4 +34,4 @@ Logs: #### A masked pattern was here #### End cached logs. 
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out b/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out index 457980a..747aa6a 100644 --- a/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out +++ b/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out @@ -11,4 +11,4 @@ SELECT evaluate_npe(src.key) LIMIT 1 PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/dyn_part3.q.out b/ql/src/test/results/clientnegative/dyn_part3.q.out index 344ae24..e9122f2 100644 --- a/ql/src/test/results/clientnegative/dyn_part3.q.out +++ b/ql/src/test/results/clientnegative/dyn_part3.q.out @@ -17,4 +17,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out b/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out index 3a3bb1e..c53199c 100644 --- a/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out +++ b/ql/src/test/results/clientnegative/dyn_part_max_per_node.q.out @@ -28,4 +28,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/fatal.q.out b/ql/src/test/results/clientnegative/fatal.q.out index 2a1bb96..3baed96 100644 --- a/ql/src/test/results/clientnegative/fatal.q.out +++ b/ql/src/test/results/clientnegative/fatal.q.out @@ -2,4 +2,4 @@ PREHOOK: query: select /*+ mapjoin(b) */ * from src a join src b on (a.key=b.key PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: file:/tmp/nzhang/hive_2010-08-02_13-41-52_752_1156521578782717030/-mr-10000 -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out b/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out index ff408b1..7456001 100644 --- a/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out +++ b/ql/src/test/results/clientnegative/index_compact_entry_limit.q.out @@ -42,4 +42,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/index_compact_size_limit.q.out b/ql/src/test/results/clientnegative/index_compact_size_limit.q.out index ff408b1..7456001 100644 --- a/ql/src/test/results/clientnegative/index_compact_size_limit.q.out +++ b/ql/src/test/results/clientnegative/index_compact_size_limit.q.out @@ -42,4 +42,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 1 from 
org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out b/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out index c4e9909..1b9f5a6 100644 --- a/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out +++ b/ql/src/test/results/clientnegative/local_mapred_error_cache.q.out @@ -20,4 +20,4 @@ org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurr org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script. #### A masked pattern was here #### Error during job, obtaining debugging information... -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out b/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out index e1cd0c5..dfc8f54 100644 --- a/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out +++ b/ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out @@ -2,4 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exi PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script. +FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script. diff --git a/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out b/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out index 592ecda..69a1bbb 100644 --- a/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out +++ b/ql/src/test/results/clientnegative/mapreduce_stack_trace_hadoop20.q.out @@ -10,4 +10,4 @@ FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Hive Runtime Error while processing row {"key":"238","value":"val_238"} FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"key":"238","value":"val_238"} Hive Runtime Error while processing row {"key":"238","value":"val_238"} -FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script. +FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script. diff --git a/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out b/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out index e1cd0c5..dfc8f54 100644 --- a/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out +++ b/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out @@ -2,4 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exi PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script. +FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script. 
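The golden-file updates above trace back to two source changes in this patch: the MR execution classes moving to org.apache.hadoop.hive.ql.exec.mr, and the configure hook now taking a UDFContext instead of a MapredContext. As a rough illustration of the second change, a context-aware UDF like the DummyContextUDF removed above would be rewritten against the new type roughly as below. This sketch is not part of the patch; it assumes the renamed class stays in org.apache.hadoop.hive.ql.exec and keeps the getReporter() accessor the old MapredContext exposed.

package org.apache.hadoop.hive.ql.udf.generic;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.Reporter;

// Hypothetical port of the deleted DummyContextUDF to the renamed context type.
// The import of UDFContext assumes the class keeps MapredContext's old package.
public class DummyContextUDF extends GenericUDF {

  private UDFContext context;
  private final LongWritable result = new LongWritable();

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Assumes UDFContext still exposes getReporter(), as MapredContext did.
    Reporter reporter = context.getReporter();
    Counters.Counter counter = reporter.getCounter(
        "org.apache.hadoop.mapred.Task$Counter", "MAP_INPUT_RECORDS");
    result.set(counter.getValue());
    return result;
  }

  @Override
  public String getDisplayString(String[] children) {
    return "dummy-func()";
  }

  @Override
  public void configure(UDFContext context) {
    this.context = context;
  }
}

Apart from the two lines that mention the context type, nothing would change relative to the removed file.
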
diff --git a/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out b/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out index e1cd0c5..dfc8f54 100644 --- a/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out +++ b/ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff_hadoop20.q.out @@ -2,4 +2,4 @@ PREHOOK: query: FROM src SELECT TRANSFORM(key, value) USING 'script_does_not_exi PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script. +FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. Unable to initialize custom script. diff --git a/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out b/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out index 5b0db2c..ae31487 100644 --- a/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out +++ b/ql/src/test/results/clientnegative/minimr_broken_pipe.q.out @@ -3,4 +3,4 @@ SELECT TRANSFORM(*) USING 'true' AS a, b FROM (SELECT TRANSFORM(*) USING 'echo' PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 20003 from org.apache.hadoop.hive.ql.exec.MapRedTask. An error occurred when trying to close the Operator running your custom script. +FAILED: Execution Error, return code 20003 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask. An error occurred when trying to close the Operator running your custom script. diff --git a/ql/src/test/results/clientnegative/script_broken_pipe1.q.out b/ql/src/test/results/clientnegative/script_broken_pipe1.q.out index 9312331..073933c 100644 --- a/ql/src/test/results/clientnegative/script_broken_pipe1.q.out +++ b/ql/src/test/results/clientnegative/script_broken_pipe1.q.out @@ -13,4 +13,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/script_broken_pipe2.q.out b/ql/src/test/results/clientnegative/script_broken_pipe2.q.out index 89a824e..e29e115 100644 --- a/ql/src/test/results/clientnegative/script_broken_pipe2.q.out +++ b/ql/src/test/results/clientnegative/script_broken_pipe2.q.out @@ -13,4 +13,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/script_broken_pipe3.q.out b/ql/src/test/results/clientnegative/script_broken_pipe3.q.out index 15a6997..1bf4fb2 100644 --- a/ql/src/test/results/clientnegative/script_broken_pipe3.q.out +++ b/ql/src/test/results/clientnegative/script_broken_pipe3.q.out @@ -13,4 +13,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/script_error.q.out b/ql/src/test/results/clientnegative/script_error.q.out index a6ba887..e2577ca 100644 --- a/ql/src/test/results/clientnegative/script_error.q.out +++ 
b/ql/src/test/results/clientnegative/script_error.q.out @@ -59,4 +59,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/serde_regex2.q.out b/ql/src/test/results/clientnegative/serde_regex2.q.out index 5db75aa..a19dcd8 100644 --- a/ql/src/test/results/clientnegative/serde_regex2.q.out +++ b/ql/src/test/results/clientnegative/serde_regex2.q.out @@ -64,4 +64,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out b/ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out index 0724e40..2ba99b1 100644 --- a/ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out +++ b/ql/src/test/results/clientnegative/stats_aggregator_error_2.q.out @@ -37,4 +37,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/stats_publisher_error_1.q.out b/ql/src/test/results/clientnegative/stats_publisher_error_1.q.out index d3256b9..1d4e681 100644 --- a/ql/src/test/results/clientnegative/stats_publisher_error_1.q.out +++ b/ql/src/test/results/clientnegative/stats_publisher_error_1.q.out @@ -39,4 +39,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/stats_publisher_error_2.q.out b/ql/src/test/results/clientnegative/stats_publisher_error_2.q.out index b572151..5284672 100644 --- a/ql/src/test/results/clientnegative/stats_publisher_error_2.q.out +++ b/ql/src/test/results/clientnegative/stats_publisher_error_2.q.out @@ -37,4 +37,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/udf_assert_true.q.out b/ql/src/test/results/clientnegative/udf_assert_true.q.out index 80368ed..1bfcd95 100644 --- a/ql/src/test/results/clientnegative/udf_assert_true.q.out +++ b/ql/src/test/results/clientnegative/udf_assert_true.q.out @@ -149,4 +149,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/udf_assert_true2.q.out b/ql/src/test/results/clientnegative/udf_assert_true2.q.out index 9c2cdff..d414071 100644 --- a/ql/src/test/results/clientnegative/udf_assert_true2.q.out +++ b/ql/src/test/results/clientnegative/udf_assert_true2.q.out @@ -73,4 +73,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from 
org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/udf_reflect_neg.q.out b/ql/src/test/results/clientnegative/udf_reflect_neg.q.out index 74df790..d65acad 100644 --- a/ql/src/test/results/clientnegative/udf_reflect_neg.q.out +++ b/ql/src/test/results/clientnegative/udf_reflect_neg.q.out @@ -19,4 +19,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/udf_test_error.q.out b/ql/src/test/results/clientnegative/udf_test_error.q.out index fe28f4e..a788a10 100644 --- a/ql/src/test/results/clientnegative/udf_test_error.q.out +++ b/ql/src/test/results/clientnegative/udf_test_error.q.out @@ -16,4 +16,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out b/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out index 431087e..98b42e0 100644 --- a/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out +++ b/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out @@ -21,4 +21,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientnegative/udfnull.q.out b/ql/src/test/results/clientnegative/udfnull.q.out index edb94c7..06b63e7 100644 --- a/ql/src/test/results/clientnegative/udfnull.q.out +++ b/ql/src/test/results/clientnegative/udfnull.q.out @@ -16,4 +16,4 @@ Task ID: Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask diff --git a/ql/src/test/results/clientpositive/auto_join25.q.out b/ql/src/test/results/clientpositive/auto_join25.q.out index 2ccded2..f6a5a89 100644 --- a/ql/src/test/results/clientpositive/auto_join25.q.out +++ b/ql/src/test/results/clientpositive/auto_join25.q.out @@ -27,7 +27,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value where (src1.ds = '2008-04-08' or src1.ds = '2008-04-09' )and (src1.hr = '12' or src1.hr = '11') @@ -75,7 +75,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask Execution failed with exit status: 3 Obtaining error information @@ -87,7 +87,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute 
BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key) INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value POSTHOOK: type: QUERY @@ -135,7 +135,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value POSTHOOK: type: QUERY diff --git a/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out b/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out index af5b840..0d50315 100644 --- a/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out +++ b/ql/src/test/results/clientpositive/infer_bucket_sort_convert_join.q.out @@ -84,7 +84,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask POSTHOOK: query: -- This test tests the scenario when the mapper dies. So, create a conditional task for the mapjoin. -- Tests a join which is not converted to a map join, the output should be bucketed and sorted. diff --git a/ql/src/test/results/clientpositive/loadpart_err.q.out b/ql/src/test/results/clientpositive/loadpart_err.q.out index 2e2ba7f..db0bfa5 100644 --- a/ql/src/test/results/clientpositive/loadpart_err.q.out +++ b/ql/src/test/results/clientpositive/loadpart_err.q.out @@ -9,7 +9,7 @@ FROM src PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@loadpart1@ds=2009-01-01 -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask PREHOOK: query: DESCRIBE loadpart1 PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE loadpart1 diff --git a/ql/src/test/results/clientpositive/mapjoin_hook.q.out b/ql/src/test/results/clientpositive/mapjoin_hook.q.out index b186755..ac5266e 100644 --- a/ql/src/test/results/clientpositive/mapjoin_hook.q.out +++ b/ql/src/test/results/clientpositive/mapjoin_hook.q.out @@ -37,7 +37,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask [MapJoinCounter PostHook] CONVERTED_LOCAL_MAPJOIN: 1 CONVERTED_MAPJOIN: 0 LOCAL_MAPJOIN: 0 COMMON_JOIN: 0 BACKUP_COMMON_JOIN: 1 PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key) INSERT OVERWRITE TABLE dest1 SELECT src1.key, src3.value @@ -55,7 +55,7 @@ Logs: #### A masked pattern was here #### FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask Execution failed with exit status: 3 Obtaining error information @@ -67,5 +67,5 @@ Logs: #### A masked pattern was here #### FAILED: Execution 
Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask -ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask [MapJoinCounter PostHook] CONVERTED_LOCAL_MAPJOIN: 2 CONVERTED_MAPJOIN: 0 LOCAL_MAPJOIN: 0 COMMON_JOIN: 0 BACKUP_COMMON_JOIN: 2 diff --git a/ql/src/test/results/compiler/plan/case_sensitivity.q.xml b/ql/src/test/results/compiler/plan/case_sensitivity.q.xml index 25dc6fd..1f1934f 100644 --- a/ql/src/test/results/compiler/plan/case_sensitivity.q.xml +++ b/ql/src/test/results/compiler/plan/case_sensitivity.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/cast1.q.xml b/ql/src/test/results/compiler/plan/cast1.q.xml index 7589b60..a974628 100644 --- a/ql/src/test/results/compiler/plan/cast1.q.xml +++ b/ql/src/test/results/compiler/plan/cast1.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/groupby1.q.xml b/ql/src/test/results/compiler/plan/groupby1.q.xml index c93d71d..45a6233 100755 --- a/ql/src/test/results/compiler/plan/groupby1.q.xml +++ b/ql/src/test/results/compiler/plan/groupby1.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + diff --git a/ql/src/test/results/compiler/plan/groupby2.q.xml b/ql/src/test/results/compiler/plan/groupby2.q.xml index b435021..095d99e 100755 --- a/ql/src/test/results/compiler/plan/groupby2.q.xml +++ b/ql/src/test/results/compiler/plan/groupby2.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/groupby3.q.xml b/ql/src/test/results/compiler/plan/groupby3.q.xml index b2f76c7..3127fa6 100644 --- a/ql/src/test/results/compiler/plan/groupby3.q.xml +++ b/ql/src/test/results/compiler/plan/groupby3.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/groupby4.q.xml b/ql/src/test/results/compiler/plan/groupby4.q.xml index bbf4ee3..f268309 100644 --- a/ql/src/test/results/compiler/plan/groupby4.q.xml +++ b/ql/src/test/results/compiler/plan/groupby4.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/groupby5.q.xml b/ql/src/test/results/compiler/plan/groupby5.q.xml index 1a65652..66b00d4 100644 --- a/ql/src/test/results/compiler/plan/groupby5.q.xml +++ b/ql/src/test/results/compiler/plan/groupby5.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/groupby6.q.xml b/ql/src/test/results/compiler/plan/groupby6.q.xml index 08cbb56..89b5026 100644 --- a/ql/src/test/results/compiler/plan/groupby6.q.xml +++ b/ql/src/test/results/compiler/plan/groupby6.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/input1.q.xml b/ql/src/test/results/compiler/plan/input1.q.xml index 779d0d5..e2ac867 100755 --- a/ql/src/test/results/compiler/plan/input1.q.xml +++ b/ql/src/test/results/compiler/plan/input1.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input2.q.xml b/ql/src/test/results/compiler/plan/input2.q.xml index e3f5909..0cba53b 100755 --- a/ql/src/test/results/compiler/plan/input2.q.xml +++ b/ql/src/test/results/compiler/plan/input2.q.xml @@ -1,6 +1,6 @@ #### A 
masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + @@ -578,7 +578,7 @@ - + @@ -901,7 +901,7 @@ - + @@ -1095,7 +1095,7 @@ - + @@ -1422,7 +1422,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input20.q.xml b/ql/src/test/results/compiler/plan/input20.q.xml index 0159ca5..f9c299d 100644 --- a/ql/src/test/results/compiler/plan/input20.q.xml +++ b/ql/src/test/results/compiler/plan/input20.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/input3.q.xml b/ql/src/test/results/compiler/plan/input3.q.xml index a7e2ce3..bcbb126 100755 --- a/ql/src/test/results/compiler/plan/input3.q.xml +++ b/ql/src/test/results/compiler/plan/input3.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + @@ -578,7 +578,7 @@ - + @@ -901,7 +901,7 @@ - + @@ -1095,7 +1095,7 @@ - + @@ -1422,7 +1422,7 @@ - + @@ -1593,7 +1593,7 @@ - + @@ -1830,7 +1830,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input4.q.xml b/ql/src/test/results/compiler/plan/input4.q.xml index 84044ab..4ef953b 100755 --- a/ql/src/test/results/compiler/plan/input4.q.xml +++ b/ql/src/test/results/compiler/plan/input4.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + diff --git a/ql/src/test/results/compiler/plan/input5.q.xml b/ql/src/test/results/compiler/plan/input5.q.xml index 2543cc4..3f0df71 100644 --- a/ql/src/test/results/compiler/plan/input5.q.xml +++ b/ql/src/test/results/compiler/plan/input5.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + diff --git a/ql/src/test/results/compiler/plan/input6.q.xml b/ql/src/test/results/compiler/plan/input6.q.xml index eaeefda..dd9cb76 100644 --- a/ql/src/test/results/compiler/plan/input6.q.xml +++ b/ql/src/test/results/compiler/plan/input6.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input7.q.xml b/ql/src/test/results/compiler/plan/input7.q.xml index 3fdb687..c1560f3 100644 --- a/ql/src/test/results/compiler/plan/input7.q.xml +++ b/ql/src/test/results/compiler/plan/input7.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input8.q.xml b/ql/src/test/results/compiler/plan/input8.q.xml index 91bd148..76ff843 100644 --- a/ql/src/test/results/compiler/plan/input8.q.xml +++ b/ql/src/test/results/compiler/plan/input8.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/input9.q.xml b/ql/src/test/results/compiler/plan/input9.q.xml index 1c85b9d..f2ab3ff 100644 --- a/ql/src/test/results/compiler/plan/input9.q.xml +++ b/ql/src/test/results/compiler/plan/input9.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input_part1.q.xml b/ql/src/test/results/compiler/plan/input_part1.q.xml index 18ea3fe..392b3ab 100644 --- a/ql/src/test/results/compiler/plan/input_part1.q.xml +++ b/ql/src/test/results/compiler/plan/input_part1.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml b/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml index 673e120..42ba507 100644 --- a/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml +++ 
b/ql/src/test/results/compiler/plan/input_testsequencefile.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/input_testxpath.q.xml b/ql/src/test/results/compiler/plan/input_testxpath.q.xml index acfad93..9b00d81 100644 --- a/ql/src/test/results/compiler/plan/input_testxpath.q.xml +++ b/ql/src/test/results/compiler/plan/input_testxpath.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/input_testxpath2.q.xml b/ql/src/test/results/compiler/plan/input_testxpath2.q.xml index 2eca8ce..31f7b8e 100644 --- a/ql/src/test/results/compiler/plan/input_testxpath2.q.xml +++ b/ql/src/test/results/compiler/plan/input_testxpath2.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/join1.q.xml b/ql/src/test/results/compiler/plan/join1.q.xml index 0c163cb..d3a0a48 100644 --- a/ql/src/test/results/compiler/plan/join1.q.xml +++ b/ql/src/test/results/compiler/plan/join1.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + diff --git a/ql/src/test/results/compiler/plan/join2.q.xml b/ql/src/test/results/compiler/plan/join2.q.xml index 91656a6..74ce1d7 100644 --- a/ql/src/test/results/compiler/plan/join2.q.xml +++ b/ql/src/test/results/compiler/plan/join2.q.xml @@ -1,10 +1,10 @@ #### A masked pattern was here #### - + - + diff --git a/ql/src/test/results/compiler/plan/join3.q.xml b/ql/src/test/results/compiler/plan/join3.q.xml index 20104b0..2d5de44 100644 --- a/ql/src/test/results/compiler/plan/join3.q.xml +++ b/ql/src/test/results/compiler/plan/join3.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + diff --git a/ql/src/test/results/compiler/plan/join4.q.xml b/ql/src/test/results/compiler/plan/join4.q.xml index 5a0933f..0ec18b8 100644 --- a/ql/src/test/results/compiler/plan/join4.q.xml +++ b/ql/src/test/results/compiler/plan/join4.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/join5.q.xml b/ql/src/test/results/compiler/plan/join5.q.xml index 1fdfc26..16d91be 100644 --- a/ql/src/test/results/compiler/plan/join5.q.xml +++ b/ql/src/test/results/compiler/plan/join5.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/join6.q.xml b/ql/src/test/results/compiler/plan/join6.q.xml index c1d6666..efdde99 100644 --- a/ql/src/test/results/compiler/plan/join6.q.xml +++ b/ql/src/test/results/compiler/plan/join6.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/join7.q.xml b/ql/src/test/results/compiler/plan/join7.q.xml index 2fd3461..c0a1431 100644 --- a/ql/src/test/results/compiler/plan/join7.q.xml +++ b/ql/src/test/results/compiler/plan/join7.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/join8.q.xml b/ql/src/test/results/compiler/plan/join8.q.xml index 3930860..c727989 100644 --- a/ql/src/test/results/compiler/plan/join8.q.xml +++ b/ql/src/test/results/compiler/plan/join8.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/sample1.q.xml b/ql/src/test/results/compiler/plan/sample1.q.xml index c193e6b..03b7114 100644 --- a/ql/src/test/results/compiler/plan/sample1.q.xml +++ b/ql/src/test/results/compiler/plan/sample1.q.xml @@ -1,6 +1,6 @@ #### A 
masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/sample2.q.xml b/ql/src/test/results/compiler/plan/sample2.q.xml index 7430a22..aa971fd 100644 --- a/ql/src/test/results/compiler/plan/sample2.q.xml +++ b/ql/src/test/results/compiler/plan/sample2.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/sample3.q.xml b/ql/src/test/results/compiler/plan/sample3.q.xml index bb5494e..4d1f63c 100644 --- a/ql/src/test/results/compiler/plan/sample3.q.xml +++ b/ql/src/test/results/compiler/plan/sample3.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/sample4.q.xml b/ql/src/test/results/compiler/plan/sample4.q.xml index 7430a22..aa971fd 100644 --- a/ql/src/test/results/compiler/plan/sample4.q.xml +++ b/ql/src/test/results/compiler/plan/sample4.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/sample5.q.xml b/ql/src/test/results/compiler/plan/sample5.q.xml index 4380d3e..12a11c4 100644 --- a/ql/src/test/results/compiler/plan/sample5.q.xml +++ b/ql/src/test/results/compiler/plan/sample5.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/sample6.q.xml b/ql/src/test/results/compiler/plan/sample6.q.xml index 841c7a8..343d07f 100644 --- a/ql/src/test/results/compiler/plan/sample6.q.xml +++ b/ql/src/test/results/compiler/plan/sample6.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/sample7.q.xml b/ql/src/test/results/compiler/plan/sample7.q.xml index 3599dca..9c83854 100644 --- a/ql/src/test/results/compiler/plan/sample7.q.xml +++ b/ql/src/test/results/compiler/plan/sample7.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -57,7 +57,7 @@ - + @@ -384,7 +384,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/subq.q.xml b/ql/src/test/results/compiler/plan/subq.q.xml index c3e7b83..91340e1 100644 --- a/ql/src/test/results/compiler/plan/subq.q.xml +++ b/ql/src/test/results/compiler/plan/subq.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -25,7 +25,7 @@ - + @@ -279,7 +279,7 @@ - + diff --git a/ql/src/test/results/compiler/plan/udf1.q.xml b/ql/src/test/results/compiler/plan/udf1.q.xml index 07fc0a4..01d5163 100644 --- a/ql/src/test/results/compiler/plan/udf1.q.xml +++ b/ql/src/test/results/compiler/plan/udf1.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/udf4.q.xml b/ql/src/test/results/compiler/plan/udf4.q.xml index dcb35b9..ebaa2f3 100644 --- a/ql/src/test/results/compiler/plan/udf4.q.xml +++ b/ql/src/test/results/compiler/plan/udf4.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/udf6.q.xml b/ql/src/test/results/compiler/plan/udf6.q.xml index f2dfdbc..65f96e1 100644 --- a/ql/src/test/results/compiler/plan/udf6.q.xml +++ b/ql/src/test/results/compiler/plan/udf6.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/udf_case.q.xml b/ql/src/test/results/compiler/plan/udf_case.q.xml index 70390d6..308732b 100644 --- 
a/ql/src/test/results/compiler/plan/udf_case.q.xml +++ b/ql/src/test/results/compiler/plan/udf_case.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/udf_when.q.xml b/ql/src/test/results/compiler/plan/udf_when.q.xml index 1447ddd..82a0dcf 100644 --- a/ql/src/test/results/compiler/plan/udf_when.q.xml +++ b/ql/src/test/results/compiler/plan/udf_when.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + Stage-1 diff --git a/ql/src/test/results/compiler/plan/union.q.xml b/ql/src/test/results/compiler/plan/union.q.xml index 7ae8331..1201336 100644 --- a/ql/src/test/results/compiler/plan/union.q.xml +++ b/ql/src/test/results/compiler/plan/union.q.xml @@ -1,6 +1,6 @@ #### A masked pattern was here #### - + @@ -25,7 +25,7 @@ - + @@ -279,7 +279,7 @@ - +
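
The compiler plan hunks above change only the class references embedded in the serialized plans. These .q.xml golden files are query plans written with java.beans.XMLEncoder, which records the fully qualified class name of every object it serializes, so relocating ExecDriver and MapRedTask into the mr subpackage touches the header of each plan that instantiates them. A minimal, self-contained sketch of that mechanism, using a stand-in bean rather than a real Hive class:

import java.beans.XMLEncoder;
import java.io.ByteArrayOutputStream;

// Stand-in bean: not a Hive class. XMLEncoder needs a public class with a
// public no-arg constructor and getter/setter pairs.
public class PlanXmlSketch {

  public static class FakeTask {
    private String id;
    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
  }

  public static void main(String[] args) {
    FakeTask task = new FakeTask();
    task.setId("Stage-1");

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    XMLEncoder encoder = new XMLEncoder(out);
    encoder.writeObject(task);
    encoder.close();

    // The output contains <object class="PlanXmlSketch$FakeTask">, i.e. the
    // fully qualified class name; moving or renaming the class changes every
    // serialized plan, which is what the .q.xml diffs above reflect.
    System.out.println(out.toString());
  }
}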