Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
===================================================================
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1199383)
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy)
@@ -485,6 +485,9 @@
     // Whether to delete the scratchdir while startup
     HIVE_START_CLEANUP_SCRATCHDIR("hive.start.cleanup.scratchdir", false),
     HIVE_INSERT_INTO_MULTILEVEL_DIRS("hive.insert.into.multilevel.dirs", false),
+
+    // Whether to execute the query or only check that it parses
+    HIVE_DRY_RUN("hive.dry.run", false),
     ;
 
     public final String varname;
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 1199383)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -131,6 +131,9 @@
   private int maxthreads;
   private static final int SLEEP_TIME = 2000;
   protected int tryCount = Integer.MAX_VALUE;
+
+  // Dry-run indicator; assigned in compile(), since conf is not yet set when field initializers run
+  private boolean isDryRun;
 
   private boolean checkLockManager() {
     boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
@@ -416,6 +419,12 @@
       ParseDriver pd = new ParseDriver();
       ASTNode tree = pd.parse(command, ctx);
       tree = ParseUtils.findRootNonNullToken(tree);
+
+      isDryRun = conf.getBoolVar(HiveConf.ConfVars.HIVE_DRY_RUN);
+      if (isDryRun) {
+        // For a dry run, report success as soon as the syntax check passes.
+        return 0;
+      }
 
       BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
       List<AbstractSemanticAnalyzerHook> saHooks = getSemanticAnalyzerHooks();
@@ -892,6 +901,12 @@
       return new CommandProcessorResponse(ret, errorMessage, SQLState);
     }
 
+    if (isDryRun) {
+      LOG.info("Dry run successful");
+      releaseLocks(ctx.getHiveLocks());
+      return new CommandProcessorResponse(ret);
+    }
+
     boolean requireLock = false;
     boolean ckLock = checkLockManager();
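
For anyone trying the patch out: in an interactive session the flag can be toggled with "set hive.dry.run=true;" like any other conf variable. Below is a minimal, hypothetical sketch of exercising it programmatically. It assumes a build with this patch applied, and the table name src is purely illustrative.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class DryRunExample {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(SessionState.class);
    // Enable the dry-run mode added by this patch (assumes the patched build).
    conf.setBoolVar(HiveConf.ConfVars.HIVE_DRY_RUN, true);
    SessionState.start(conf);

    Driver driver = new Driver(conf);
    // With hive.dry.run=true the statement is parsed but never analyzed or executed.
    CommandProcessorResponse resp = driver.run("SELECT * FROM src");
    // 0 means the statement passed the syntax check.
    System.out.println("response code: " + resp.getResponseCode());
  }
}

A statement with a syntax error would instead come back with a non-zero response code and the usual parse error message, which makes the flag handy for validating a script's syntax without touching any data.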