diff --git cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java index 8eb5c0181e..779fb66350 100644 --- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java +++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java @@ -37,24 +37,10 @@ import java.util.Map; import java.util.Set; -import com.google.common.base.Splitter; - -import jline.console.ConsoleReader; -import jline.console.completer.Completer; -import jline.console.history.FileHistory; -import jline.console.history.History; -import jline.console.history.PersistentHistory; -import jline.console.completer.StringsCompleter; -import jline.console.completer.ArgumentCompleter; -import jline.console.completer.ArgumentCompleter.ArgumentDelimiter; -import jline.console.completer.ArgumentCompleter.AbstractArgumentDelimiter; - import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hive.cli.CliSessionState; -import org.apache.hadoop.hive.cli.OptionsProcessor; import org.apache.hadoop.hive.common.HiveInterruptUtils; import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.common.LogUtils.LogInitializationException; @@ -64,10 +50,10 @@ import org.apache.hadoop.hive.common.io.FetchConverter; import org.apache.hadoop.hive.common.io.SessionStream; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.conf.HiveVariableSource; import org.apache.hadoop.hive.conf.Validator; import org.apache.hadoop.hive.conf.VariableSubstitution; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.IDriver; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; @@ -88,6 +74,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import 
com.google.common.base.Splitter; + +import jline.console.ConsoleReader; +import jline.console.completer.ArgumentCompleter; +import jline.console.completer.ArgumentCompleter.AbstractArgumentDelimiter; +import jline.console.completer.ArgumentCompleter.ArgumentDelimiter; +import jline.console.completer.Completer; +import jline.console.completer.StringsCompleter; +import jline.console.history.FileHistory; +import jline.console.history.History; +import jline.console.history.PersistentHistory; import sun.misc.Signal; import sun.misc.SignalHandler; @@ -119,7 +116,7 @@ public CliDriver() { console = new LogHelper(LOG); } - public int processCmd(String cmd) { + public CommandProcessorResponse processCmd(String cmd) { CliSessionState ss = (CliSessionState) SessionState.get(); ss.setLastCommand(cmd); @@ -129,7 +126,7 @@ public int processCmd(String cmd) { ss.err.flush(); String cmd_trimmed = HiveStringUtils.removeComments(cmd).trim(); String[] tokens = tokenizeCmd(cmd_trimmed); - int ret = 0; + CommandProcessorResponse response = new CommandProcessorResponse(0); if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) { @@ -151,14 +148,14 @@ public int processCmd(String cmd) { File sourceFile = new File(cmd_1); if (! 
sourceFile.isFile()){ console.printError("File: "+ cmd_1 + " is not a file."); - ret = 1; + response = new CommandProcessorResponse(1); } else { try { - ret = processFile(cmd_1); + response = processFile(cmd_1); } catch (IOException e) { console.printError("Failed processing file "+ cmd_1 +" "+ e.getLocalizedMessage(), stringifyException(e)); - ret = 1; + response = new CommandProcessorResponse(1); } } } else if (cmd_trimmed.startsWith("!")) { @@ -174,14 +171,14 @@ public int processCmd(String cmd) { // shell_cmd = "/bin/bash -c \'" + shell_cmd + "\'"; try { ShellCmdExecutor executor = new ShellCmdExecutor(shell_cmd, ss.out, ss.err); - ret = executor.execute(); - if (ret != 0) { - console.printError("Command failed with exit code = " + ret); + response = new CommandProcessorResponse(executor.execute()); + if (response.getResponseCode() != 0) { + console.printError("Command failed with exit code = " + response.getResponseCode()); } } catch (Exception e) { console.printError("Exception raised from Shell command " + e.getLocalizedMessage(), stringifyException(e)); - ret = 1; + response = new CommandProcessorResponse(1); } } else { // local mode try { @@ -189,15 +186,15 @@ public int processCmd(String cmd) { try (CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf)) { if (proc instanceof IDriver) { // Let Driver strip comments using sql parser - ret = processLocalCmd(cmd, proc, ss); + response = processLocalCmd(cmd, proc, ss); } else { - ret = processLocalCmd(cmd_trimmed, proc, ss); + response = processLocalCmd(cmd_trimmed, proc, ss); } } } catch (SQLException e) { console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(), org.apache.hadoop.util.StringUtils.stringifyException(e)); - ret = 1; + response = new CommandProcessorResponse(1); } catch (Exception e) { throw new RuntimeException(e); @@ -205,7 +202,7 @@
private String getFirstCmd(String cmd, int length) { return cmd.split("\\s+"); } - int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) { + CommandProcessorResponse processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) { boolean escapeCRLF = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_ESCAPE_CRLF); - int ret = 0; + CommandProcessorResponse response = new CommandProcessorResponse(0); if (proc != null) { if (proc instanceof IDriver) { @@ -242,12 +239,12 @@ int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) { // Set HDFS CallerContext to queryId and reset back to sessionId after the query is done ShimLoader.getHadoopShims().setHadoopQueryContext(qp.getQueryState().getQueryId()); - ret = qp.run(cmd).getResponseCode(); + response = qp.run(cmd); - if (ret != 0) { + if (response.getResponseCode() != 0) { qp.close(); ShimLoader.getHadoopShims().setHadoopSessionContext(ss.getSessionId()); - return ret; + return response; } // query has run capture the time @@ -280,7 +277,7 @@ int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) { } catch (IOException e) { console.printError("Failed with exception " + e.getClass().getName() + ":" + e.getMessage(), "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e)); - ret = 1; + response = new CommandProcessorResponse(1); } qp.close(); @@ -309,11 +306,10 @@ int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) { console.printInfo(consoleMsg); } } - ret = res.getResponseCode(); + return res; } } - - return ret; + return response; } /** @@ -340,7 +336,7 @@ private void printHeader(IDriver qp, PrintStream out) { } } - public int processLine(String line) { + public CommandProcessorResponse processLine(String line) { return processLine(line, false); } @@ -354,7 +350,7 @@ public int processLine(String line) { * returning -1 * @return 0 if ok */ - public int processLine(String line, boolean allowInterrupting) { + 
public CommandProcessorResponse processLine(String line, boolean allowInterrupting) { SignalHandler oldSignal = null; Signal interruptSignal = null; @@ -390,29 +386,30 @@ public void handle(Signal signal) { } try { - int lastRet = 0, ret = 0; + CommandProcessorResponse lastRet = new CommandProcessorResponse(0); + CommandProcessorResponse ret; // we can not use "split" function directly as ";" may be quoted List commands = splitSemiColon(line); - String command = ""; + StringBuilder command = new StringBuilder(); for (String oneCmd : commands) { if (StringUtils.endsWith(oneCmd, "\\")) { - command += StringUtils.chop(oneCmd) + ";"; + command.append(StringUtils.chop(oneCmd) + ";"); continue; } else { - command += oneCmd; + command.append(oneCmd); } - if (StringUtils.isBlank(command)) { + if (StringUtils.isBlank(command.toString())) { continue; } - ret = processCmd(command); - command = ""; + ret = processCmd(command.toString()); + command.setLength(0); lastRet = ret; boolean ignoreErrors = HiveConf.getBoolVar(conf, HiveConf.ConfVars.CLIIGNOREERRORS); - if (ret != 0 && !ignoreErrors) { + if (ret.getResponseCode() != 0 && !ignoreErrors) { return ret; } } @@ -477,7 +474,7 @@ public void handle(Signal signal) { return ret; } - public int processReader(BufferedReader r) throws IOException { + public CommandProcessorResponse processReader(BufferedReader r) throws IOException { String line; StringBuilder qsb = new StringBuilder(); @@ -491,7 +488,7 @@ public int processReader(BufferedReader r) throws IOException { return (processLine(qsb.toString())); } - public int processFile(String fileName) throws IOException { + public CommandProcessorResponse processFile(String fileName) throws IOException { Path path = new Path(fileName); FileSystem fs; if (!path.toUri().isAbsolute()) { @@ -501,34 +498,29 @@ public int processFile(String fileName) throws IOException { fs = FileSystem.get(path.toUri(), conf); } BufferedReader bufferReader = null; - int rc = 0; + try { - bufferReader = 
new BufferedReader(new InputStreamReader(fs.open(path))); - rc = processReader(bufferReader); + bufferReader = new BufferedReader(new InputStreamReader(fs.open(path), StandardCharsets.UTF_8)); + return processReader(bufferReader); } finally { IOUtils.closeStream(bufferReader); } - return rc; } public void processInitFiles(CliSessionState ss) throws IOException { boolean saveSilent = ss.getIsSilent(); ss.setIsSilent(true); for (String initFile : ss.initFiles) { - int rc = processFile(initFile); - if (rc != 0) { - System.exit(rc); - } + CommandProcessorResponse response = processFile(initFile); + exitOnFailure(response); } if (ss.initFiles.size() == 0) { if (System.getenv("HIVE_HOME") != null) { String hivercDefault = System.getenv("HIVE_HOME") + File.separator + "bin" + File.separator + HIVERCFILE; if (new File(hivercDefault).exists()) { - int rc = processFile(hivercDefault); - if (rc != 0) { - System.exit(rc); - } + CommandProcessorResponse response = processFile(hivercDefault); + exitOnFailure(response); console.printError("Putting the global hiverc in " + "$HIVE_HOME/bin/.hiverc is deprecated. 
Please "+ "use $HIVE_CONF_DIR/.hiverc instead."); @@ -538,33 +530,34 @@ public void processInitFiles(CliSessionState ss) throws IOException { String hivercDefault = System.getenv("HIVE_CONF_DIR") + File.separator + HIVERCFILE; if (new File(hivercDefault).exists()) { - int rc = processFile(hivercDefault); - if (rc != 0) { - System.exit(rc); - } + CommandProcessorResponse response = processFile(hivercDefault); + exitOnFailure(response); } } if (System.getProperty("user.home") != null) { String hivercUser = System.getProperty("user.home") + File.separator + HIVERCFILE; if (new File(hivercUser).exists()) { - int rc = processFile(hivercUser); - if (rc != 0) { - System.exit(rc); - } + CommandProcessorResponse response = processFile(hivercUser); + exitOnFailure(response); } } } ss.setIsSilent(saveSilent); } + private void exitOnFailure(CommandProcessorResponse response) { + int rc = response.getResponseCode(); + if (rc != 0) { + System.exit(rc); + } + } + public void processSelectDatabase(CliSessionState ss) throws IOException { String database = ss.database; if (database != null) { - int rc = processLine("use " + database + ";"); - if (rc != 0) { - System.exit(rc); - } + CommandProcessorResponse response = processLine("use " + database + ";"); + exitOnFailure(response); } } @@ -704,7 +697,7 @@ public static void main(String[] args) throws Exception { System.exit(ret); } - public int run(String[] args) throws Exception { + public int run(String[] args) throws Exception { OptionsProcessor oproc = new OptionsProcessor(); if (!oproc.process_stage1(args)) { @@ -781,7 +774,7 @@ public int run(String[] args) throws Exception { // execute cli driver work try { - return executeDriver(ss, conf, oproc); + return executeDriver(ss, conf, oproc).getResponseCode(); } finally { ss.resetThreadName(); ss.close(); @@ -796,7 +789,7 @@ public int run(String[] args) throws Exception { * @return status of the CLI command execution * @throws Exception */ - private int 
executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc) + private CommandProcessorResponse executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor oproc) throws Exception { CliDriver cli = new CliDriver(); @@ -809,8 +802,7 @@ private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor op cli.processInitFiles(ss); if (ss.execString != null) { - int cmdProcessStatus = cli.processLine(ss.execString); - return cmdProcessStatus; + return cli.processLine(ss.execString); } try { @@ -819,7 +811,7 @@ private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor op } } catch (FileNotFoundException e) { System.err.println("Could not open input file for reading. (" + e.getMessage() + ")"); - return 3; + return new CommandProcessorResponse(3); } if ("mr".equals(HiveConf.getVar(conf, ConfVars.HIVE_EXECUTION_ENGINE))) { console.printInfo(HiveConf.generateMrDeprecationWarning()); @@ -828,34 +820,34 @@ private int executeDriver(CliSessionState ss, HiveConf conf, OptionsProcessor op setupConsoleReader(); String line; - int ret = 0; - String prefix = ""; + CommandProcessorResponse response = new CommandProcessorResponse(0); + StringBuilder prefix = new StringBuilder(); String curDB = getFormattedDb(conf, ss); String curPrompt = prompt + curDB; String dbSpaces = spacesForString(curDB); while ((line = reader.readLine(curPrompt + "> ")) != null) { - if (!prefix.equals("")) { - prefix += '\n'; + if (!prefix.toString().equals("")) { + prefix.append('\n'); } if (line.trim().startsWith("--")) { continue; } if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) { line = prefix + line; - ret = cli.processLine(line, true); - prefix = ""; + response = cli.processLine(line, true); + prefix.setLength(0); curDB = getFormattedDb(conf, ss); curPrompt = prompt + curDB; dbSpaces = dbSpaces.length() == curDB.length() ? 
dbSpaces : spacesForString(curDB); } else { - prefix = prefix + line; + prefix.append(line); curPrompt = prompt2 + dbSpaces; continue; } } - return ret; + return response; } private void setupCmdHistory() { diff --git cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java index 4a2bae5c7c..debcc7a781 100644 --- cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java +++ cli/src/test/org/apache/hadoop/hive/cli/TestCliDriverMethods.java @@ -124,12 +124,12 @@ public void testThatCliDriverDoesNotStripComments() throws Exception { // Save output as yo cannot print it while System.out and System.err are weird String message; String errors; - int ret; + CommandProcessorResponse response; try { CliSessionState.start(ss); CliDriver cliDriver = new CliDriver(); // issue a command with bad options - ret = cliDriver.processCmd("!ls --abcdefghijklmnopqrstuvwxyz123456789"); + response = cliDriver.processCmd("!ls --abcdefghijklmnopqrstuvwxyz123456789"); } finally { // restore System.out and System.err System.setOut(oldOut); @@ -138,7 +138,7 @@ public void testThatCliDriverDoesNotStripComments() throws Exception { message = dataOut.toString("UTF-8"); errors = dataErr.toString("UTF-8"); assertTrue("Comments with '--; should not have been stripped," - + " so command should fail", ret != 0); + + " so command should fail", response.getResponseCode() != 0); assertTrue("Comments with '--; should not have been stripped," + " so we should have got an error in the output: '" + errors + "'.", errors.contains("option")); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java index aa2c7a7ea3..6a15c00e98 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java +++ 
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/AbstractCoreBlobstoreCliDriver.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hive.testutils.HiveTestEnvSetup; import org.junit.After; import org.junit.AfterClass; @@ -147,9 +148,9 @@ protected void runTestHelper(String tname, String fname, String fpath, boolean e qt.addFile(fpath); qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); - if ((ecode == 0) ^ expectSuccess) { - qt.failed(ecode, fname, debugHint); + CommandProcessorResponse response = qt.executeClient(fname); + if ((response.getResponseCode() == 0) ^ expectSuccess) { + qt.failedQuery(response.getException(), response.getResponseCode(), fname, debugHint); } QTestProcessExecResult result = qt.checkCliDriverResults(fname); @@ -160,7 +161,7 @@ protected void runTestHelper(String tname, String fname, String fpath, boolean e } } catch (Exception e) { - qt.failed(e, fname, debugHint); + qt.failedWithException(e, fname, debugHint); } long elapsedTime = System.currentTimeMillis() - startTime; diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java index 6bbcf3d474..9b5d5e3997 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreAccumuloCliDriver.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.accumulo.AccumuloTestSetup; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.After; import org.junit.AfterClass; import 
org.junit.Before; @@ -109,9 +110,9 @@ public void runTest(String tname, String fname, String fpath) { qt.addFile(fpath); qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); - if (ecode != 0) { - qt.failed(ecode, fname, null); + CommandProcessorResponse response = qt.executeClient(fname); + if (response.getResponseCode() != 0) { + qt.failedQuery(response.getException(), response.getResponseCode(), fname, null); } QTestProcessExecResult result = qt.checkCliDriverResults(fname); @@ -121,7 +122,7 @@ public void runTest(String tname, String fname, String fpath) { qt.clearPostTestEffects(); } catch (Exception e) { - qt.failed(e, fname, null); + qt.failedWithException(e, fname, null); } long elapsedTime = System.currentTimeMillis() - startTime; diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java index 846547466d..2411843f76 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCliDriver.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.util.ElapsedTimeLoggingWrapper; import org.junit.After; import org.junit.AfterClass; @@ -183,10 +184,11 @@ public void runTest(String testName, String fname, String fpath) { qt.addFile(fpath); qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); + CommandProcessorResponse response = qt.executeClient(fname); + int ecode = response.getResponseCode(); if (ecode != 0) { failed = true; - qt.failed(ecode, fname, debugHint); + qt.failedQuery(response.getException(), response.getResponseCode(), fname, debugHint); } setupAdditionalPartialMasks(); @@ -201,7 +203,7 @@ 
public void runTest(String testName, String fname, String fpath) { } catch (Exception e) { failed = true; - qt.failed(e, fname, debugHint); + qt.failedWithException(e, fname, debugHint); } finally { String message = "Done query " + fname + ". succeeded=" + !failed + ", skipped=" + skipped + ". ElapsedTime(ms)=" + sw.stop().elapsed(TimeUnit.MILLISECONDS); diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java index 7a06768fda..2dfee134fc 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreCompareCliDriver.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -136,8 +137,6 @@ public void runTest(String tname, String fname, String fpath) { qt.addFile(new File(queryDirectory, versionFile), true); } - int ecode = 0; - qt.cliInit(new File(fpath)); List outputs = new ArrayList<>(versionFiles.size()); @@ -146,9 +145,9 @@ public void runTest(String tname, String fname, String fpath) { String versionStr = versionFile.substring(tname.length() + 1, versionFile.length() - 3); outputs.add(qt.cliInit(new File(queryDirectory, tname + "." + versionStr))); // TODO: will this work? 
- ecode = qt.executeClient(versionFile, fname); - if (ecode != 0) { - qt.failed(ecode, fname, debugHint); + CommandProcessorResponse response = qt.executeClient(versionFile, fname); + if (response.getResponseCode() != 0) { + qt.failedQuery(response.getException(), response.getResponseCode(), fname, debugHint); } } @@ -160,7 +159,7 @@ public void runTest(String tname, String fname, String fpath) { } } catch (Exception e) { - qt.failed(e, fname, debugHint); + qt.failedWithException(e, fname, debugHint); } long elapsedTime = System.currentTimeMillis() - startTime; diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java index 0d67768967..ec187d903d 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseCliDriver.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.hbase.HBaseTestSetup; import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -111,9 +112,9 @@ public void runTest(String tname, String fname, String fpath) { qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); - if (ecode != 0) { - qt.failed(ecode, fname, null); + CommandProcessorResponse response = qt.executeClient(fname); + if (response.getResponseCode() != 0) { + qt.failedQuery(response.getException(), response.getResponseCode(), fname, null); } QTestProcessExecResult result = qt.checkCliDriverResults(fname); @@ -122,7 +123,7 @@ public void runTest(String tname, String fname, String fpath) { } } catch (Exception e) { - qt.failed(e, fname, null); + qt.failedWithException(e, fname, null); } long elapsedTime = System.currentTimeMillis() - 
startTime; diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java index af170a9514..b65e80509c 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreHBaseNegativeCliDriver.java @@ -104,7 +104,7 @@ public void runTest(String tname, String fname, String fpath) { System.err.println("Begin query: " + fname); qt.addFile(fpath); qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); + int ecode = qt.executeClient(fname).getResponseCode(); if (ecode == 0) { qt.failed(fname, null); } @@ -115,7 +115,7 @@ public void runTest(String tname, String fname, String fpath) { } } catch (Exception e) { - qt.failed(e, fname, null); + qt.failedWithException(e, fname, null); } long elapsedTime = System.currentTimeMillis() - startTime; diff --git itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java index 91c3bf87b9..8f66b1f436 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CoreNegativeCliDriver.java @@ -125,7 +125,7 @@ public void runTest(String tname, String fname, String fpath) throws Exception { qt.addFile(fpath); qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); + int ecode = qt.executeClient(fname).getResponseCode(); if (ecode == 0) { qt.failed(fname, debugHint); } @@ -145,7 +145,7 @@ public void runTest(String tname, String fname, String fpath) throws Exception { } } catch (Exception e) { - qt.failed(e, fname, debugHint); + qt.failedWithException(e, fname, debugHint); } long elapsedTime = System.currentTimeMillis() - startTime; diff --git 
itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java index cf3d815441..81ce5cb01e 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java +++ itests/util/src/main/java/org/apache/hadoop/hive/cli/control/CorePerfCliDriver.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.QTestProcessExecResult; import org.apache.hadoop.hive.ql.QTestUtil; import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.junit.After; import org.junit.AfterClass; @@ -139,9 +140,10 @@ public void runTest(String name, String fname, String fpath) { qt.addFile(fpath); qt.cliInit(new File(fpath)); - int ecode = qt.executeClient(fname); + CommandProcessorResponse response = qt.executeClient(fname); + int ecode = response.getResponseCode(); if (ecode != 0) { - qt.failed(ecode, fname, debugHint); + qt.failedQuery(response.getException(), response.getResponseCode(), fname, debugHint); } QTestProcessExecResult result = qt.checkCliDriverResults(fname); @@ -151,7 +153,7 @@ public void runTest(String name, String fname, String fpath) { qt.failedDiff(result.getReturnCode(), fname, message); } } catch (Exception e) { - qt.failed(e, fname, debugHint); + qt.failedWithException(e, fname, debugHint); } long elapsedTime = System.currentTimeMillis() - startTime; diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java index 5da654a323..f77e40aedc 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql; import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME; + import java.io.BufferedInputStream; import 
java.io.BufferedOutputStream; import java.io.File; @@ -1011,7 +1012,7 @@ private void cleanupFromFile() throws IOException { String cleanupCommands = readEntireFileIntoString(cleanupFile); LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands); - int result = getCliDriver().processLine(cleanupCommands); + int result = getCliDriver().processLine(cleanupCommands).getResponseCode(); if (result != 0) { LOG.error("Failed during cleanup processLine with code={}. Ignoring", result); // TODO Convert this to an Assert.fail once HIVE-14682 is fixed @@ -1050,7 +1051,7 @@ private void initFromScript() throws IOException { String initCommands = readEntireFileIntoString(scriptFile); LOG.info("Initial setup (" + initScript + "):\n" + initCommands); - int result = cliDriver.processLine(initCommands); + int result = cliDriver.processLine(initCommands).getResponseCode(); LOG.info("Result from cliDrriver.processLine in createSources=" + result); if (result != 0) { Assert.fail("Failed during createSources processLine with code=" + result); @@ -1087,7 +1088,7 @@ protected void initDataset(String table) throws Exception { throw new RuntimeException(String.format("dataset file not found %s", tableFile), e); } - int result = getCliDriver().processLine(commands); + int result = getCliDriver().processLine(commands).getResponseCode(); LOG.info("Result from cliDrriver.processLine in initFromDatasets=" + result); if (result != 0) { Assert.fail("Failed during initFromDatasets processLine with code=" + result); @@ -1239,60 +1240,60 @@ public int executeAdhocCommand(String q) { String q1 = q.split(";")[0] + ";"; LOG.debug("Executing " + q1); - return cliDriver.processLine(q1); + return cliDriver.processLine(q1).getResponseCode(); } public int execute(String tname) { return drv.run(qMap.get(tname)).getResponseCode(); } - public int executeClient(String tname1, String tname2) { + public CommandProcessorResponse executeClient(String tname1, String tname2) { String commands = 
getCommand(tname1) + CRLF + getCommand(tname2); return executeClientInternal(commands); } - public int executeClient(String fileName) { + public CommandProcessorResponse executeClient(String fileName) { return executeClientInternal(getCommand(fileName)); } - private int executeClientInternal(String commands) { + private CommandProcessorResponse executeClientInternal(String commands) { List cmds = CliDriver.splitSemiColon(commands); - int rc = 0; + CommandProcessorResponse response = new CommandProcessorResponse(0); - String command = ""; + StringBuilder command = new StringBuilder(); QTestSyntaxUtil qtsu = new QTestSyntaxUtil(this, conf, pd); qtsu.checkQFileSyntax(cmds); for (String oneCmd : cmds) { if (StringUtils.endsWith(oneCmd, "\\")) { - command += StringUtils.chop(oneCmd) + "\\;"; + command.append(StringUtils.chop(oneCmd) + "\\;"); continue; } else { if (isHiveCommand(oneCmd)) { - command = oneCmd; - } else { - command += oneCmd; + command.setLength(0); } + command.append(oneCmd); } - if (StringUtils.isBlank(command)) { + if (StringUtils.isBlank(command.toString())) { continue; } - if (isCommandUsedForTesting(command)) { - rc = executeTestCommand(command); + String strCommand = command.toString(); + if (isCommandUsedForTesting(strCommand)) { + response = executeTestCommand(strCommand); } else { - rc = cliDriver.processLine(command); + response = cliDriver.processLine(strCommand); } - if (rc != 0 && !ignoreErrors()) { + if (response.getResponseCode() != 0 && !ignoreErrors()) { break; } - command = ""; + command.setLength(0); } - if (rc == 0 && SessionState.get() != null) { + if (response.getResponseCode() == 0 && SessionState.get() != null) { SessionState.get().setLastCommand(null); // reset } - return rc; + return response; } /** @@ -1314,7 +1315,7 @@ boolean isHiveCommand(String command) { } } - private int executeTestCommand(final String command) { + private CommandProcessorResponse executeTestCommand(final String command) { String commandName = 
command.trim().split("\\s+")[0]; String commandArgs = command.trim().substring(commandName.length()); @@ -1347,7 +1348,7 @@ private int executeTestCommand(final String command) { response.getException() != null ? Throwables.getStackTraceAsString(response.getException()) : ""); } - return rc; + return response; } else { throw new RuntimeException("Could not get CommandProcessor for command: " + commandName); } @@ -1958,21 +1959,6 @@ private static void ensureQvFileList(String queryDir) { return result; } - public void failed(int ecode, String fname, String debugHint) { - String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null; - String - message = - "Client execution failed with error code = " - + ecode - + (command != null ? " running \"" + command : "") - + "\" fname=" - + fname - + " " - + (debugHint != null ? debugHint : ""); - LOG.error(message); - Assert.fail(message); - } - // for negative tests, which is succeeded.. no need to print the query string public void failed(String fname, String debugHint) { Assert.fail("Client Execution was expected to fail, but succeeded with error code 0 for fname=" + fname + (debugHint @@ -1992,7 +1978,18 @@ public void failedDiff(int ecode, String fname, String debugHint) { Assert.fail(message); } - public void failed(Exception e, String fname, String debugHint) { + public void failedQuery(Throwable e, int ecode, String fname, String debugHint) { + String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null; + + String message = String.format( + "Client execution failed with error code = %d %nrunning %s %nfname=%s%n%s%n %s", ecode, + command != null ? command : "", fname, debugHint != null ? debugHint : "", + e == null ? "" : org.apache.hadoop.util.StringUtils.stringifyException(e)); + LOG.error(message); + Assert.fail(message); + } + + public void failedWithException(Exception e, String fname, String debugHint) { String command = SessionState.get() != null ? 
SessionState.get().getLastCommand() : null; System.err.println("Failed query: " + fname); System.err.flush(); diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java index a7972ecea3..6eb20fe497 100644 --- itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java +++ itests/util/src/main/java/org/apache/hadoop/hive/ql/parse/CoreParseNegative.java @@ -126,7 +126,7 @@ public void runTest(String tname, String fname, String fpath) throws Exception { catch (ParseException pe) { QTestProcessExecResult result = qt.checkNegativeResults(fname, pe); if (result.getReturnCode() != 0) { - qt.failed(result.getReturnCode(), fname, result.getCapturedOutput() + "\r\n" + debugHint); + qt.failedQuery(null, result.getReturnCode(), fname, result.getCapturedOutput() + "\r\n" + debugHint); } } catch (SemanticException se) { @@ -138,7 +138,7 @@ public void runTest(String tname, String fname, String fpath) throws Exception { } } catch (Exception e) { - qt.failed(e, fname, debugHint); + qt.failedWithException(e, fname, debugHint); } long elapsedTime = System.currentTimeMillis() - startTime;