Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 762134)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -40,7 +40,6 @@
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.history.HiveHistory;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.plan.tableDesc;
 import org.apache.hadoop.hive.serde2.ByteStream;
@@ -55,7 +54,6 @@
   private int maxRows = 100;
   ByteStream.Output bos = new ByteStream.Output();
 
-  private ParseDriver pd;
   private HiveConf conf;
   private DataInput resStream;
   private LogHelper console;
@@ -154,33 +152,16 @@
         gc.get(Calendar.MINUTE), gc.get(Calendar.SECOND));
   }
 
-
-  public int run(String command) {
+  public int compile(String command) {
+    TaskFactory.resetId();
 
-    boolean noName = StringUtils.isEmpty(conf
-        .getVar(HiveConf.ConfVars.HADOOPJOBNAME));
-    int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
-    int jobs = 0;
-
-    conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, command);
-
-    String queryId = makeQueryId();
-    conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
-
     try {
-
-      TaskFactory.resetId();
-      LOG.info("Starting command: " + command);
-
       ctx.clear();
       ctx.makeScratchDir();
 
-      if (SessionState.get() != null)
-        SessionState.get().getHiveHistory().startQuery(command, conf.getVar(HiveConf.ConfVars.HIVEQUERYID) );
-      resStream = null;
-      pd = new ParseDriver();
+      ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command);
 
       while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
@@ -192,8 +173,55 @@
       // Do semantic analysis and plan generation
       sem.analyze(tree, ctx);
       LOG.info("Semantic Analysis Completed");
+    } catch (SemanticException e) {
+      console.printError("FAILED: Error in semantic analysis: "
+          + e.getMessage(), "\n"
+          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return (10);
+    } catch (ParseException e) {
+      console.printError("FAILED: Parse Error: " + e.getMessage(), "\n"
+          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return (11);
+    } catch (Exception e) {
+      console.printError("FAILED: Unknown exception : " + e.getMessage(), "\n"
+          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      return (12);
+    }
 
-      jobs = countJobs(sem.getRootTasks());
+    return (0);
+  }
+
+  public BaseSemanticAnalyzer getQuery() {
+    return sem;
+  }
+
+  public int run(String command) {
+    int ret = compile(command);
+    if (ret != 0)
+      return (ret);
+
+    return execute(command);
+  }
+
+  public int execute(String command) {
+    boolean noName = StringUtils.isEmpty(conf
+        .getVar(HiveConf.ConfVars.HADOOPJOBNAME));
+    int maxlen = conf.getIntVar(HiveConf.ConfVars.HIVEJOBNAMELENGTH);
+
+    conf.setVar(HiveConf.ConfVars.HIVEQUERYSTRING, command);
+
+    String queryId = makeQueryId();
+    conf.setVar(HiveConf.ConfVars.HIVEQUERYID, queryId);
+
+    try {
+      LOG.info("Starting command: " + command);
+
+      if (SessionState.get() != null)
+        SessionState.get().getHiveHistory().startQuery(command, conf.getVar(HiveConf.ConfVars.HIVEQUERYID) );
+
+      resStream = null;
+
+      int jobs = countJobs(sem.getRootTasks());
       if (jobs > 0) {
        console.printInfo("Total MapReduce jobs = " + jobs);
      }
@@ -216,7 +244,7 @@
         rootTask.initialize(conf);
       }
 
-      // A very simple runtime that keeps putting runnable takss
+      // A very simple runtime that keeps putting runnable tasks
       // on a list and when a job completes, it puts the children at the back of
       // the list
       // while taking the job to run from the front of the list
@@ -269,21 +297,6 @@
               Keys.QUERY_RET_CODE, String.valueOf(0));
           SessionState.get().getHiveHistory().printRowCount(queryId);
         }
-      } catch (SemanticException e) {
-        if (SessionState.get() != null)
-          SessionState.get().getHiveHistory().setQueryProperty(queryId,
-              Keys.QUERY_RET_CODE, String.valueOf(10));
-        console.printError("FAILED: Error in semantic analysis: "
-            + e.getMessage(), "\n"
-            + org.apache.hadoop.util.StringUtils.stringifyException(e));
-        return (10);
-      } catch (ParseException e) {
-        if (SessionState.get() != null)
-          SessionState.get().getHiveHistory().setQueryProperty(queryId,
-              Keys.QUERY_RET_CODE, String.valueOf(11));
-        console.printError("FAILED: Parse Error: " + e.getMessage(), "\n"
-            + org.apache.hadoop.util.StringUtils.stringifyException(e));
-        return (11);
       } catch (Exception e) {
         if (SessionState.get() != null)
           SessionState.get().getHiveHistory().setQueryProperty(queryId,
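Note for reviewers: the patch splits Driver.run() into a compile() phase (parse plus semantic analysis, returning 10/11/12 on failure) and an execute() phase (job submission), with getQuery() exposing the analyzed plan in between; run() keeps its old behavior by chaining the two. Below is a rough sketch of how a long-lived caller, such as a server front-end, might drive the new two-phase API. The no-arg Driver constructor and the SessionState.start() setup mirror what CliDriver does today; they are assumptions for illustration, not part of this patch, and the query string is hypothetical.

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.Driver;
  import org.apache.hadoop.hive.ql.session.SessionState;

  public class TwoPhaseExample {
    public static void main(String[] args) {
      // Assumed setup, as in CliDriver: Driver reads its conf from the session.
      HiveConf conf = new HiveConf(TwoPhaseExample.class);
      SessionState.start(new SessionState(conf));

      Driver driver = new Driver();
      String query = "SELECT * FROM src"; // hypothetical query

      // Phase 1: parse and semantic analysis only. Failures surface here
      // (10 semantic, 11 parse, 12 unknown) before any job is launched.
      int ret = driver.compile(query);
      if (ret != 0) {
        System.err.println("compile failed, return code " + ret);
        return;
      }

      // The analyzed query is now available via driver.getQuery() for
      // callers that want to inspect the plan before running it.

      // Phase 2: launch the MapReduce jobs for the compiled query.
      ret = driver.execute(query);
      System.out.println("execute returned " + ret);
    }
  }

The value of the split is that compile-time errors are reported cheaply, without touching the cluster, and the caller decides whether and when to execute.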