Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java	(revision 900350)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java	(working copy)
@@ -21,9 +21,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.createFunctionDesc;
@@ -39,17 +36,10 @@
   private static final long serialVersionUID = 1L;
   private static final Log LOG = LogFactory.getLog("hive.ql.exec.FunctionTask");
 
-  transient HiveConf conf;
-
   public FunctionTask() {
     super();
   }
 
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
-    super.initialize(conf, queryPlan, ctx);
-    this.conf = conf;
-  }
-
   @Override
   public int execute() {
     createFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java	(revision 900350)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchTask.java	(working copy)
@@ -44,14 +44,15 @@
   private int maxRows = 100;
   private FetchOperator ftOp;
-  private LazySimpleSerDe mSerde;
-  private int totalRows;
-
-  public FetchTask() {
-    super();
-  }
+  private LazySimpleSerDe mSerde;
+  private int totalRows;
+
+  public FetchTask() {
+    super();
+  }
 
-  public void initialize (HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
+  @Override
+  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
     super.initialize(conf, queryPlan, ctx);
     try {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java	(revision 900350)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Task.java	(working copy)
@@ -68,7 +68,7 @@
     this.taskCounters = new HashMap();
   }
 
-  public void initialize (HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
+  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
     this.queryPlan = queryPlan;
     isdone = false;
     started = false;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java	(revision 900350)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ConditionalTask.java	(working copy)
@@ -21,9 +21,7 @@
 import java.io.Serializable;
 import java.util.List;
 
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.plan.ConditionalResolver;
 import org.apache.hadoop.hive.ql.plan.ConditionalWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
@@ -63,10 +61,6 @@
     return false;
  }
 
-  public void initialize (HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
-    super.initialize(conf, queryPlan, driverContext);
-  }
-
   @Override
   public int execute() {
     resTasks = resolver.getTasks(conf, resolverCtx);
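Note: the four files above converge on one cleanup. The base Task.initialize(HiveConf, QueryPlan, DriverContext) is the single initialization entry point, so subclasses that only cached the HiveConf or forwarded straight to super (FunctionTask, ConditionalTask, and DDLTask below) can drop their private conf fields and pass-through initialize() overrides, while subclasses with real setup work (FetchTask) keep an override marked @Override. A minimal, self-contained sketch of that pattern follows; plain Object stands in for HiveConf/QueryPlan/DriverContext so it compiles on its own, the class names are illustrative, and the simplified bodies are not the real Hive implementations. It assumes, as the unchanged use of conf in ConditionalTask.execute() implies, that the base class keeps the conf it is handed in a protected field.

    // Sketch only: Object replaces HiveConf/QueryPlan/DriverContext so the
    // example is standalone; bodies are illustrative, not Hive's code.
    abstract class Task<T> {
      protected transient Object conf;      // assumed protected field in the real base class
      protected transient Object queryPlan;
      protected boolean isdone;
      protected boolean started;

      // Single initialize() signature shared by every task type.
      public void initialize(Object conf, Object queryPlan, Object driverContext) {
        this.conf = conf;                   // subclasses no longer repeat this.conf = conf
        this.queryPlan = queryPlan;
        isdone = false;
        started = false;
      }

      public abstract int execute();
    }

    // Pattern after the patch: no conf field, no pass-through initialize().
    class ConditionalTaskSketch extends Task<Void> {
      @Override
      public int execute() {
        // reads the inherited conf directly, as ConditionalTask.execute() does
        return conf != null ? 0 : 1;
      }
    }

    // Subclasses with real setup keep an override, now tagged @Override so a
    // signature drift fails at compile time instead of silently not overriding.
    class FetchTaskSketch extends Task<Void> {
      @Override
      public void initialize(Object conf, Object queryPlan, Object driverContext) {
        super.initialize(conf, queryPlan, driverContext);
        // task-specific setup would follow here
      }

      @Override
      public int execute() {
        return 0;
      }
    }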
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java	(revision 900350)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java	(working copy)
@@ -18,9 +18,19 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import java.io.*;
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Set;
 import java.net.URI;
 import java.net.URLEncoder;
 import java.net.URLDecoder;
@@ -31,7 +41,6 @@
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.*;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.Counters;
@@ -51,15 +60,26 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
-import org.apache.hadoop.hive.ql.io.*;
+import org.apache.hadoop.hive.ql.io.HiveKey;
+import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.varia.NullAppender;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.io.UnsupportedEncodingException;
 import java.lang.ClassNotFoundException;
-import org.apache.hadoop.hive.common.FileUtils;
+
 
 public class ExecDriver extends Task implements Serializable {
 
   private static final long serialVersionUID = 1L;
@@ -70,13 +90,11 @@
 
   transient protected boolean success = false; // if job execution is successful
 
   public static Random randGen = new Random();
 
-  /**
-   * Constructor when invoked from QL
-   */
+
   public ExecDriver() {
     super();
   }
-
+
   public static String getResourceFiles(Configuration conf, SessionState.ResourceType t) {
     // fill in local files to be added to the task environment
     SessionState ss = SessionState.get();
@@ -107,8 +125,9 @@
   /**
    * Initialization when invoked from QL
   */
   */
-  public void initialize(HiveConf conf, QueryPlan queryPlan) {
-    super.initialize(conf, queryPlan, null);
+  @Override
+  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
+    super.initialize(conf, queryPlan, driverContext);
     job = new JobConf(conf, ExecDriver.class);
 
     // NOTE: initialize is only called if it is in non-local mode.
     // In case it's in non-local mode, we need to move the SessionState files
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(revision 900350)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java	(working copy)
@@ -95,19 +95,13 @@
   private static final long serialVersionUID = 1L;
   static final private Log LOG = LogFactory.getLog("hive.ql.exec.DDLTask");
 
-  transient HiveConf conf;
   static final private int separator = Utilities.tabCode;
   static final private int terminator = Utilities.newLineCode;
 
   public DDLTask() {
     super();
   }
-
-  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext ctx) {
-    super.initialize(conf, queryPlan, ctx);
-    this.conf = conf;
-  }
-
+
   public int execute() {
 
     // Create the db
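Note: the ExecDriver hunk has one behavioral consequence beyond the import cleanup. The old two-argument initialize(HiveConf, QueryPlan) passed null as the DriverContext to super, so ExecDriver was the one task type a launcher had to special-case; with the unified three-argument signature every task is initialized the same way. A hypothetical call site, sketched under the assumption of a Driver-style launcher (tsk, plan, and driverContext are illustrative names, not taken from the patch):

    // Same call now works for ExecDriver, DDLTask, FetchTask, ...
    for (Task<? extends Serializable> tsk : plan.getRootTasks()) {
      tsk.initialize(conf, plan, driverContext);
    }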