Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1220880) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -552,6 +552,10 @@ // Whether to delete the scratchdir while startup HIVE_START_CLEANUP_SCRATCHDIR("hive.start.cleanup.scratchdir", false), HIVE_INSERT_INTO_MULTILEVEL_DIRS("hive.insert.into.multilevel.dirs", false), + + // Whether to execute only a part of the query as a dry-run + // valid values are {off,parse,analyze,plan} + HIVE_DRY_RUN("hive.dry.run", "off"), ; public final String varname; Index: ql/src/test/results/clientnegative/dryrun_bad_parse.q.out =================================================================== --- ql/src/test/results/clientnegative/dryrun_bad_parse.q.out (revision 0) +++ ql/src/test/results/clientnegative/dryrun_bad_parse.q.out (revision 0) @@ -0,0 +1,2 @@ +FAILED: Parse Error: line 2:0 cannot recognize input near 'bork' 'bork' 'bork' + Index: ql/src/test/results/clientnegative/dryrun_nonexistant_table.q.out =================================================================== --- ql/src/test/results/clientnegative/dryrun_nonexistant_table.q.out (revision 0) +++ ql/src/test/results/clientnegative/dryrun_nonexistant_table.q.out (revision 0) @@ -0,0 +1 @@ +FAILED: Error in semantic analysis: Line 2:21 Table not found 'non_existant_table_nope_nope_nope' Index: ql/src/test/results/clientnegative/dryrun_bad_fetch_serde.q.out =================================================================== --- ql/src/test/results/clientnegative/dryrun_bad_fetch_serde.q.out (revision 0) +++ ql/src/test/results/clientnegative/dryrun_bad_fetch_serde.q.out (revision 0) @@ -0,0 +1,2 @@ +#### A masked pattern was here #### + Index: ql/src/test/results/clientpositive/dryrun4.q.out =================================================================== Index: 
ql/src/test/results/clientpositive/dryrun_nonexistant_table_parse_ok.q.out =================================================================== Index: ql/src/test/results/clientpositive/dryrun_bad_fetch_serde_analyze_ok.q.out =================================================================== Index: ql/src/test/results/clientpositive/dryrun1.q.out =================================================================== --- ql/src/test/results/clientpositive/dryrun1.q.out (revision 0) +++ ql/src/test/results/clientpositive/dryrun1.q.out (revision 0) @@ -0,0 +1,9 @@ +PREHOOK: query: select count(1) from srcbucket +PREHOOK: type: QUERY +PREHOOK: Input: default@srcbucket +#### A masked pattern was here #### +POSTHOOK: query: select count(1) from srcbucket +POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcbucket +#### A masked pattern was here #### +1000 Index: ql/src/test/results/clientpositive/dryrun2.q.out =================================================================== Index: ql/src/test/results/clientpositive/dryrun3.q.out =================================================================== Index: ql/src/test/queries/clientnegative/dryrun_bad_fetch_serde.q =================================================================== --- ql/src/test/queries/clientnegative/dryrun_bad_fetch_serde.q (revision 0) +++ ql/src/test/queries/clientnegative/dryrun_bad_fetch_serde.q (revision 0) @@ -0,0 +1,5 @@ +set hive.fetch.output.serde=org.apache.blah.blah.Blah; +set hive.dry.run=analyze; +select * from srcbucket; +set hive.dry.run=plan; +select * from srcbucket; Index: ql/src/test/queries/clientnegative/dryrun_nonexistant_table.q =================================================================== --- ql/src/test/queries/clientnegative/dryrun_nonexistant_table.q (revision 0) +++ ql/src/test/queries/clientnegative/dryrun_nonexistant_table.q (revision 0) @@ -0,0 +1,4 @@ +set hive.dry.run=parse; +select count(1) from non_existant_table_nope_nope_nope; +set hive.dry.run=analyze; +select 
count(1) from non_existant_table_nope_nope_nope; Index: ql/src/test/queries/clientnegative/dryrun_bad_parse.q =================================================================== --- ql/src/test/queries/clientnegative/dryrun_bad_parse.q (revision 0) +++ ql/src/test/queries/clientnegative/dryrun_bad_parse.q (revision 0) @@ -0,0 +1,2 @@ +set hive.dry.run=parse; +bork bork bork; Index: ql/src/test/queries/clientpositive/dryrun_nonexistant_table_parse_ok.q =================================================================== --- ql/src/test/queries/clientpositive/dryrun_nonexistant_table_parse_ok.q (revision 0) +++ ql/src/test/queries/clientpositive/dryrun_nonexistant_table_parse_ok.q (revision 0) @@ -0,0 +1,2 @@ +set hive.dry.run=parse; +select count(1) from non_existant_table_nope_nope_nope; Index: ql/src/test/queries/clientpositive/dryrun_bad_fetch_serde_analyze_ok.q =================================================================== --- ql/src/test/queries/clientpositive/dryrun_bad_fetch_serde_analyze_ok.q (revision 0) +++ ql/src/test/queries/clientpositive/dryrun_bad_fetch_serde_analyze_ok.q (revision 0) @@ -0,0 +1,3 @@ +set hive.fetch.output.serde=org.apache.blah.blah.Blah; +set hive.dry.run=analyze; +select * from srcbucket; Index: ql/src/test/queries/clientpositive/dryrun1.q =================================================================== --- ql/src/test/queries/clientpositive/dryrun1.q (revision 0) +++ ql/src/test/queries/clientpositive/dryrun1.q (revision 0) @@ -0,0 +1,2 @@ +set hive.dry.run=off; +select count(1) from srcbucket; Index: ql/src/test/queries/clientpositive/dryrun2.q =================================================================== --- ql/src/test/queries/clientpositive/dryrun2.q (revision 0) +++ ql/src/test/queries/clientpositive/dryrun2.q (revision 0) @@ -0,0 +1,2 @@ +set hive.dry.run=parse; +select count(1) from srcbucket; Index: ql/src/test/queries/clientpositive/dryrun3.q =================================================================== 
--- ql/src/test/queries/clientpositive/dryrun3.q (revision 0) +++ ql/src/test/queries/clientpositive/dryrun3.q (revision 0) @@ -0,0 +1,2 @@ +set hive.dry.run=analyze; +select count(1) from srcbucket; Index: ql/src/test/queries/clientpositive/dryrun4.q =================================================================== --- ql/src/test/queries/clientpositive/dryrun4.q (revision 0) +++ ql/src/test/queries/clientpositive/dryrun4.q (revision 0) @@ -0,0 +1,2 @@ +set hive.dry.run=plan; +select count(1) from srcbucket; Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1220880) +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy) @@ -131,7 +131,32 @@ private int maxthreads; private static final int SLEEP_TIME = 2000; protected int tryCount = Integer.MAX_VALUE; + + public enum DryRunMode{ + OFF("off"), + PARSE("parse"), + ANALYZE("analyze"), + PLAN("plan") + ; + + private String mode; + private DryRunMode(String dryRunMode){ + mode = dryRunMode; + } + @Override + public String toString(){ + return mode; + } + public boolean equals(String another){ + return mode.equals(another); + } + + } + + // Dry-run indicator + private String dryRunMode; + private boolean checkLockManager() { boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY); if (!supportConcurrency) { @@ -394,6 +419,9 @@ PerfLogger perfLogger = PerfLogger.getPerfLogger(); perfLogger.PerfLogBegin(LOG, PerfLogger.COMPILE); + // figure out if this is a dry run, and if so, what level + dryRunMode = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_DRY_RUN); + //holder for parent command type/string when executing reentrant queries QueryState queryState = new QueryState(); @@ -416,7 +444,18 @@ ParseDriver pd = new ParseDriver(); ASTNode tree = pd.parse(command, ctx); tree = ParseUtils.findRootNonNullToken(tree); + + 
if(DryRunMode.PARSE.equals(dryRunMode)) { + // return success after syntax check. + LOG.info("Dry run : PARSE done"); + // we still need to set the schema for the result, + // albeit based on a null semantic analyzer + schema = getSchema(null, conf); + + return 0; + } + BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree); List saHooks = getSemanticAnalyzerHooks(); @@ -437,6 +476,16 @@ LOG.info("Semantic Analysis Completed"); + if(DryRunMode.ANALYZE.equals(dryRunMode)) { + // we still need to define a schema for result + // if we're returning before the plan stage. + schema = getSchema(sem, conf); + + // return success after semantic analysis. + LOG.info("Dry run : ANALYZE done"); + return 0; + } + // validate the plan sem.validate(); @@ -892,6 +941,12 @@ return new CommandProcessorResponse(ret, errorMessage, SQLState); } + if(!DryRunMode.OFF.equals(dryRunMode)) { + LOG.info("Dry run successful."); + releaseLocks(ctx.getHiveLocks()); + return new CommandProcessorResponse(ret); + } + boolean requireLock = false; boolean ckLock = checkLockManager();