diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 7538d18c51..a3aca6d390 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql;
 
 import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
+import static org.junit.Assert.assertTrue;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -96,6 +97,7 @@ import org.apache.hadoop.hive.ql.exec.spark.session.SparkSession;
 import org.apache.hadoop.hive.ql.exec.spark.session.SparkSessionManagerImpl;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
 import org.apache.hadoop.hive.ql.lockmgr.zookeeper.CuratorFrameworkSingleton;
 import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -110,14 +112,25 @@ import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.mapper.StatsSources;
+import org.apache.hadoop.hive.ql.processors.AddResourceProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.processors.CompileProcessor;
+import org.apache.hadoop.hive.ql.processors.CryptoProcessor;
+import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hadoop.hive.ql.processors.HiveCommand;
+import org.apache.hadoop.hive.ql.processors.ListResourceProcessor;
+import org.apache.hadoop.hive.ql.processors.LlapCacheResourceProcessor;
+import org.apache.hadoop.hive.ql.processors.LlapClusterResourceProcessor;
+import org.apache.hadoop.hive.ql.processors.ReloadProcessor;
+import org.apache.hadoop.hive.ql.processors.ResetProcessor;
+import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.HadoopShims.HdfsErasureCodingShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.StreamPrinter;
 import org.apache.hive.druid.MiniDruidCluster;
 import org.apache.hive.kafka.SingleNodeKafkaCluster;
@@ -1261,6 +1274,28 @@ private int executeClientInternal(String commands) {
     int rc = 0;
     String command = "";
+
+    if (shouldCheckSyntax()) {
+      // check the syntax of every command up front, before any of them execute
+      for (String oneCmd : cmds) {
+        if (StringUtils.endsWith(oneCmd, "\\")) {
+          command += StringUtils.chop(oneCmd) + "\\;";
+          continue;
+        } else {
+          if (isHiveCommand(oneCmd)) {
+            command = oneCmd;
+          } else {
+            command += oneCmd;
+          }
+        }
+        if (StringUtils.isBlank(command)) {
+          continue;
+        }
+        assertTrue("Syntax error in command: " + command, checkSyntax(command));
+        command = "";
+      }
+    }
+
     for (String oneCmd : cmds) {
       if (StringUtils.endsWith(oneCmd, "\\")) {
         command += StringUtils.chop(oneCmd) + "\\;";
@@ -1293,7 +1328,106 @@ private int executeClientInternal(String commands) {
     return rc;
   }
 
-  /**
+  /** The syntax pre-check is opt-in: it runs only when -Dtest.check.syntax=true is set. */
+  private boolean shouldCheckSyntax() {
+    return "true".equalsIgnoreCase(System.getProperty("test.check.syntax"));
+  }
+
+  /**
+   * Validates a single command without executing it: SQL statements must parse and
+   * analyze cleanly, non-SQL commands are argument-checked by processLocalCmd.
+   */
+  private boolean checkSyntax(String cmd) {
+    int ret = 0;
+    CliSessionState ss = (CliSessionState) SessionState.get();
+
+    String cmdTrimmed = HiveStringUtils.removeComments(cmd).trim();
+    String[] tokens = cmdTrimmed.split("\\s+");
+    // source, quit/exit and shell (!) commands are accepted as-is
+    if (tokens[0].equalsIgnoreCase("source")) {
+      return true;
+    }
+    if (cmdTrimmed.equalsIgnoreCase("quit") || cmdTrimmed.equalsIgnoreCase("exit")) {
+      return true;
+    }
+    if (cmdTrimmed.startsWith("!")) {
+      return true;
+    }
+    try {
+      CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
+      if (proc instanceof IDriver) {
+        try {
+          Context ctx = new Context(conf);
+          HiveTxnManager queryTxnMgr = SessionState.get().initTxnMgr(conf);
+          ctx.setHiveTxnManager(queryTxnMgr);
+          ctx.setCmd(cmd);
+          ctx.setHDFSCleanup(true);
+          ASTNode tree = pd.parse(cmd, ctx);
+          analyzeAST(tree);
+        } catch (Exception e) {
+          return false;
+        }
+      } else {
+        ret = processLocalCmd(cmdTrimmed, proc, ss);
+      }
+    } catch (SQLException e) {
+      e.printStackTrace();
+      return false;
+    }
+    return ret == 0;
+  }
+
+  /**
+   * Minimal argument validation for non-SQL commands (set, dfs, add, ...).
+   * Returns 0 if the command looks well-formed, -1 otherwise.
+   */
+  private int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
+    int ret = 0;
+    if (proc != null) {
+      String firstToken = cmd.trim().split("\\s+")[0];
+      String cmd1 = cmd.trim().substring(firstToken.length()).trim();
+      if (proc instanceof ResetProcessor ||
+          proc instanceof CompileProcessor ||
+          proc instanceof ReloadProcessor ||
+          proc instanceof CryptoProcessor ||
+          proc instanceof AddResourceProcessor ||
+          proc instanceof ListResourceProcessor ||
+          proc instanceof LlapClusterResourceProcessor ||
+          proc instanceof LlapCacheResourceProcessor) {
+        // these commands need at least one argument
+        if (cmd1.isEmpty()) {
+          ret = -1;
+        }
+      }
+      if (proc instanceof SetProcessor) {
+        // in a qfile a set command is expected to assign a value
+        if (!cmd1.contains("=")) {
+          ret = -1;
+        }
+      }
+      if (proc instanceof DfsProcessor) {
+        String[] argv = cmd1.trim().split("\\s+");
+        String dfsCmd = argv[0]; // e.g. "-put" in "dfs -put <src> <dst>"
+        if ("-put".equals(dfsCmd) || "-test".equals(dfsCmd) ||
+            "-copyFromLocal".equals(dfsCmd) || "-moveFromLocal".equals(dfsCmd) ||
+            "-get".equals(dfsCmd) ||
+            "-copyToLocal".equals(dfsCmd) || "-moveToLocal".equals(dfsCmd) ||
+            "-mv".equals(dfsCmd) || "-cp".equals(dfsCmd)) {
+          // commands that take a source and a destination (or a flag and a path)
+          if (argv.length < 3) {
+            ret = -1;
+          }
+        } else if ("-rm".equals(dfsCmd) || "-rmr".equals(dfsCmd) ||
+            "-cat".equals(dfsCmd) || "-mkdir".equals(dfsCmd) ||
+            "-touchz".equals(dfsCmd) || "-stat".equals(dfsCmd) ||
+            "-text".equals(dfsCmd)) {
+          // commands that take at least one path
+          if (argv.length < 2) {
+            ret = -1;
+          }
+        }
+      }
+    }
+    return ret;
+  }
+
+  /**
    * This allows a .q file to continue executing after a statement runs into an error which is convenient
    * if you want to use another hive cmd after the failure to sanity check the state of the system.
    */
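
Reviewer note: the pre-check is opt-in. shouldCheckSyntax() gates on the
test.check.syntax JVM system property, so existing qtest runs behave exactly
as before unless the property is set. Assuming the usual qtest invocation
from the itests tree (the TestCliDriver/qfile selection below is the standard
way to drive a single .q file and is not introduced by this patch; join1.q is
only an illustrative qfile), a run with the check enabled would look like:

    cd itests/qtest
    mvn test -Dtest=TestCliDriver -Dqfile=join1.q -Dtest.check.syntax=true

With the property set, executeClientInternal() first walks every command in
the file: SQL statements must parse and analyze cleanly, non-SQL commands are
argument-checked by processLocalCmd(), and assertTrue fails the test on the
first malformed statement before anything is executed.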