diff --git hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java index 729a5e7f62..7ec89b6c93 100644 --- hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java +++ hcatalog/server-extensions/src/test/java/org/apache/hive/hcatalog/listener/TestMsgBusConnection.java @@ -115,6 +115,6 @@ public void testConnection() throws Exception { private void runQuery(String query) throws Exception { CommandProcessorResponse cpr = driver.run(query); - assertFalse(cpr.getMessage(), cpr.failed()); + assertFalse(cpr.getErrorMessage(), cpr.failed()); } } diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index 00b21d530c..4a87a72c92 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -112,6 +112,7 @@ import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper; import org.apache.hadoop.hive.ql.plan.mapper.StatsSource; +import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.security.authorization.command.CommandAuthorizer; import org.apache.hadoop.hive.ql.session.LineageState; @@ -343,7 +344,7 @@ public int compile(String command, boolean resetTaskIds) { try { compile(command, resetTaskIds, false); return 0; - } catch (CommandProcessorResponse cpr) { + } catch (CommandProcessorException cpr) { return cpr.getErrorCode(); } } @@ -352,7 +353,7 @@ public int compile(String command, boolean resetTaskIds) { // interrupted, it should be set to true if the compile is called within another method like // runInternal, which defers the close to the called in that method. 
@VisibleForTesting - public void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorResponse { + public void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorException { PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.COMPILE); driverState.lock(); @@ -362,6 +363,8 @@ public void compile(String command, boolean resetTaskIds, boolean deferClose) th driverState.unlock(); } + schema = null; + command = new VariableSubstitution(new HiveVariableSource() { @Override public Map getHiveVariable() { @@ -565,7 +568,7 @@ public void run() { } } catch (AuthorizationException authExp) { console.printError("Authorization failed:" + authExp.getMessage() + ". Use SHOW GRANT to get more details."); - throw createProcessorResponse(403, authExp.getMessage(), "42000", null); + throw createProcessorException(403, authExp.getMessage(), "42000", null); } finally { perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION); } @@ -584,8 +587,8 @@ public void run() { } } } - } catch (CommandProcessorResponse cpr) { - throw cpr; + } catch (CommandProcessorException cpe) { + throw cpe; } catch (Exception e) { checkInterrupted("during query compilation: " + e.getMessage(), null, null); @@ -608,7 +611,7 @@ public void run() { } console.printError(errorMessage, "\n" + StringUtils.stringifyException(e)); - throw createProcessorResponse(error.getErrorCode(), errorMessage, error.getSQLState(), e); + throw createProcessorException(error.getErrorCode(), errorMessage, error.getSQLState(), e); } finally { // Trigger post compilation hook. Note that if the compilation fails here then // before/after execution hook will never be executed. 
@@ -773,7 +776,7 @@ private void setLastReplIdForDump(HiveConf conf) throws HiveException, TExceptio LOG.debug("Setting " + ReplUtils.LAST_REPL_ID_KEY + " = " + lastReplId); } - private void openTransaction() throws LockException, CommandProcessorResponse { + private void openTransaction() throws LockException, CommandProcessorException { if (checkConcurrency() && startImplicitTxn(queryTxnMgr) && !queryTxnMgr.isTxnOpen()) { String userFromUGI = getUserFromUGI(); queryTxnMgr.openTxn(ctx, userFromUGI); @@ -832,7 +835,8 @@ private boolean startImplicitTxn(HiveTxnManager txnManager) throws LockException return shouldOpenImplicitTxn; } - private void checkInterrupted(String msg, HookContext hookContext, PerfLogger perfLogger) throws CommandProcessorResponse { + private void checkInterrupted(String msg, HookContext hookContext, PerfLogger perfLogger) + throws CommandProcessorException { if (driverState.isAborted()) { String errorMessage = "FAILED: command has been interrupted: " + msg; console.printError(errorMessage); @@ -843,7 +847,7 @@ private void checkInterrupted(String msg, HookContext hookContext, PerfLogger pe LOG.warn("Caught exception attempting to invoke Failure Hooks", e); } } - throw createProcessorResponse(1000, errorMessage, "HY008", null); + throw createProcessorException(1000, errorMessage, "HY008", null); } } @@ -1050,7 +1054,7 @@ private void addTableFromEntity(Entity entity, Collection tableList) { return result; } - private String getUserFromUGI() throws CommandProcessorResponse { + private String getUserFromUGI() throws CommandProcessorException { // Don't use the userName member, as it may or may not have been set. Get the value from // conf, which calls into getUGI to figure out who the process is running as. 
try { @@ -1058,7 +1062,7 @@ private String getUserFromUGI() throws CommandProcessorResponse { } catch (IOException e) { String errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage(); console.printError(errorMessage, "\n" + StringUtils.stringifyException(e)); - throw createProcessorResponse(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); + throw createProcessorException(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); } } @@ -1069,9 +1073,9 @@ private String getUserFromUGI() throws CommandProcessorResponse { * * This method also records the list of valid transactions. This must be done after any * transactions have been opened. - * @throws CommandProcessorResponse + * @throws CommandProcessorException **/ - private void acquireLocks() throws CommandProcessorResponse { + private void acquireLocks() throws CommandProcessorException { PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ACQUIRE_READ_WRITE_LOCKS); @@ -1151,7 +1155,7 @@ private void acquireLocks() throws CommandProcessorResponse { } catch (Exception e) { String errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage(); console.printError(errorMessage, "\n" + StringUtils.stringifyException(e)); - throw createProcessorResponse(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); + throw createProcessorException(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); } finally { perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ACQUIRE_READ_WRITE_LOCKS); } @@ -1168,8 +1172,7 @@ public void releaseLocksAndCommitOrRollback(boolean commit) throws LockException * **/ @VisibleForTesting - public void releaseLocksAndCommitOrRollback(boolean commit, HiveTxnManager txnManager) - throws LockException { + public void releaseLocksAndCommitOrRollback(boolean commit, HiveTxnManager txnManager) throws LockException { PerfLogger perfLogger = SessionState.getPerfLogger(); 
perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.RELEASE_LOCKS); HiveTxnManager txnMgr; @@ -1236,8 +1239,9 @@ public CommandProcessorResponse run(String command, boolean alreadyCompiled) { try { runInternal(command, alreadyCompiled); - return createProcessorResponse(0, null, null, null); - } catch (CommandProcessorResponse cpr) { + return CommandProcessorResponse.createSuccessful(schema); + } catch (CommandProcessorException cpe) { + CommandProcessorResponse cpr = CommandProcessorResponse.createFailure(cpe, schema); SessionState ss = SessionState.get(); if (ss == null) { return cpr; @@ -1253,28 +1257,28 @@ public CommandProcessorResponse run(String command, boolean alreadyCompiled) { * the error is a specific/expected one. * It's written to stdout for backward compatibility (WebHCat consumes it).*/ try { - if (cpr.getException() == null) { - mdf.error(ss.out, cpr.getErrorMessage(), cpr.getResponseCode(), cpr.getSQLState()); + if (cpe.getException() == null) { + mdf.error(ss.out, cpe.getErrorMessage(), cpe.getResponseCode(), cpe.getSqlState()); return cpr; } - ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode()); + ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpe.getResponseCode()); if (canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) { /*Some HiveExceptions (e.g. SemanticException) don't set canonical ErrorMsg explicitly, but there is logic (e.g. #compile()) to find an appropriate canonical error and return its code as error code. 
In this case we want to preserve it for downstream code to interpret*/ - mdf.error(ss.out, cpr.getErrorMessage(), cpr.getResponseCode(), cpr.getSQLState(), null); + mdf.error(ss.out, cpe.getErrorMessage(), cpe.getResponseCode(), cpe.getSqlState(), null); return cpr; } - if (cpr.getException() instanceof HiveException) { - HiveException rc = (HiveException)cpr.getException(); - mdf.error(ss.out, cpr.getErrorMessage(), rc.getCanonicalErrorMsg().getErrorCode(), cpr.getSQLState(), + if (cpe.getException() instanceof HiveException) { + HiveException rc = (HiveException)cpe.getException(); + mdf.error(ss.out, cpe.getErrorMessage(), rc.getCanonicalErrorMsg().getErrorCode(), cpe.getSqlState(), rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ? StringUtils.stringifyException(rc) : null); } else { - ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(cpr.getException().getMessage()); - mdf.error(ss.out, cpr.getErrorMessage(), canonicalMsg.getErrorCode(), cpr.getSQLState(), - StringUtils.stringifyException(cpr.getException())); + ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(cpe.getException().getMessage()); + mdf.error(ss.out, cpe.getErrorMessage(), canonicalMsg.getErrorCode(), cpe.getSqlState(), + StringUtils.stringifyException(cpe.getException())); } } catch (HiveException ex) { console.printError("Unable to JSON-encode the error", StringUtils.stringifyException(ex)); @@ -1291,9 +1295,9 @@ public CommandProcessorResponse compileAndRespond(String command) { public CommandProcessorResponse compileAndRespond(String command, boolean cleanupTxnList) { try { compileInternal(command, false); - return createProcessorResponse(0, null, null, null); - } catch (CommandProcessorResponse e) { - return e; + return CommandProcessorResponse.createSuccessful(schema); + } catch (CommandProcessorException cpe) { + return CommandProcessorResponse.createFailure(cpe, schema); } finally { if (cleanupTxnList) { // Valid txn list might be generated for a query compiled using this @@ -1303,7 +1307,7 @@ 
public CommandProcessorResponse compileAndRespond(String command, boolean cleanu } } - public void lockAndRespond() throws CommandProcessorResponse { + public void lockAndRespond() throws CommandProcessorException { // Assumes the query has already been compiled if (plan == null) { throw new IllegalStateException( @@ -1313,14 +1317,14 @@ public void lockAndRespond() throws CommandProcessorResponse { if (requiresLock()) { try { acquireLocks(); - } catch (CommandProcessorResponse cpr) { - rollback(cpr); - throw cpr; + } catch (CommandProcessorException cpe) { + rollback(cpe); + throw cpe; } } } - private void compileInternal(String command, boolean deferClose) throws CommandProcessorResponse { + private void compileInternal(String command, boolean deferClose) throws CommandProcessorException { Metrics metrics = MetricsFactory.getInstance(); if (metrics != null) { metrics.incrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1); @@ -1339,18 +1343,18 @@ private void compileInternal(String command, boolean deferClose) throws CommandP } if (!success) { String errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg(); - throw createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(), errorMessage, null, null); + throw createProcessorException(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(), errorMessage, null, null); } try { compile(command, true, deferClose); - } catch (CommandProcessorResponse cpr) { + } catch (CommandProcessorException cpe) { try { releaseLocksAndCommitOrRollback(false); } catch (LockException e) { LOG.warn("Exception in releasing locks. " + StringUtils.stringifyException(e)); } - throw cpr; + throw cpe; } } //Save compile-time PerfLogging for WebUI. 
@@ -1360,7 +1364,7 @@ private void compileInternal(String command, boolean deferClose) throws CommandP queryDisplay.setPerfLogEnds(QueryDisplay.Phase.COMPILATION, perfLogger.getEndTimes()); } - private void runInternal(String command, boolean alreadyCompiled) throws CommandProcessorResponse { + private void runInternal(String command, boolean alreadyCompiled) throws CommandProcessorException { DriverState.setDriverState(driverState); driverState.lock(); @@ -1371,7 +1375,7 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command } else { String errorMessage = "FAILED: Precompiled query has been cancelled or closed."; console.printError(errorMessage); - throw createProcessorResponse(12, errorMessage, null, null); + throw createProcessorException(12, errorMessage, null, null); } } else { driverState.compiling(); @@ -1392,7 +1396,7 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command } catch (Exception e) { String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); console.printError(errorMessage + "\n" + StringUtils.stringifyException(e)); - throw createProcessorResponse(12, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); + throw createProcessorException(12, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); } if (!alreadyCompiled) { @@ -1461,9 +1465,9 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command try { execute(); - } catch (CommandProcessorResponse cpr) { - rollback(cpr); - throw cpr; + } catch (CommandProcessorException cpe) { + rollback(cpe); + throw cpe; } //if needRequireLock is false, the release here will do nothing because there is no lock @@ -1494,7 +1498,7 @@ else if(plan.getOperation() == HiveOperation.ROLLBACK) { } catch (Exception e) { String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); console.printError(errorMessage + "\n" + StringUtils.stringifyException(e)); - throw 
createProcessorResponse(12, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); + throw createProcessorException(12, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e); } isFinishedWithError = false; } finally { @@ -1512,26 +1516,27 @@ else if(plan.getOperation() == HiveOperation.ROLLBACK) { driverState.unlock(); } } + + SessionState.getPerfLogger().cleanupPerfLogMetrics(); } - private CommandProcessorResponse rollback(CommandProcessorResponse cpr) throws CommandProcessorResponse { + private void rollback(CommandProcessorException cpe) throws CommandProcessorException { //console.printError(cpr.toString()); try { releaseLocksAndCommitOrRollback(false); - } - catch (LockException e) { - LOG.error("rollback() FAILED: " + cpr);//make sure not to loose + } catch (LockException e) { + LOG.error("rollback() FAILED: " + cpe);//make sure not to lose handleHiveException(e, 12, "Additional info in hive.log at \"rollback() FAILED\""); } - return cpr; } - private CommandProcessorResponse handleHiveException(HiveException e, int ret) throws CommandProcessorResponse { + private CommandProcessorException handleHiveException(HiveException e, int ret) throws CommandProcessorException { return handleHiveException(e, ret, null); } - private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) throws CommandProcessorResponse { + private CommandProcessorException handleHiveException(HiveException e, int ret, String rootMsg) + throws CommandProcessorException { String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e); if(rootMsg != null) { errorMessage += "\n" + rootMsg; @@ -1539,7 +1544,7 @@ private CommandProcessorResponse handleHiveException(HiveException e, int ret, S String sqlState = e.getCanonicalErrorMsg() != null ? 
e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage()); console.printError(errorMessage + "\n" + StringUtils.stringifyException(e)); - throw createProcessorResponse(ret, errorMessage, sqlState, e); + throw createProcessorException(ret, errorMessage, sqlState, e); } private boolean requiresLock() { if (!checkConcurrency()) { @@ -1587,18 +1592,17 @@ private boolean isExplicitLockOperation() { return false; } - private CommandProcessorResponse createProcessorResponse(int ret, String errorMessage, String sqlState, + private CommandProcessorException createProcessorException(int ret, String errorMessage, String sqlState, Throwable downstreamError) { SessionState.getPerfLogger().cleanupPerfLogMetrics(); queryDisplay.setErrorMessage(errorMessage); - if(downstreamError != null && downstreamError instanceof HiveException) { + if (downstreamError != null && downstreamError instanceof HiveException) { ErrorMsg em = ((HiveException)downstreamError).getCanonicalErrorMsg(); - if(em != null) { - return new CommandProcessorResponse(ret, errorMessage, sqlState, - schema, downstreamError, em.getErrorCode(), null); + if (em != null) { + return new CommandProcessorException(ret, em.getErrorCode(), errorMessage, sqlState, downstreamError); } } - return new CommandProcessorResponse(ret, errorMessage, sqlState, downstreamError); + return new CommandProcessorException(ret, -1, errorMessage, sqlState, downstreamError); } private void useFetchFromCache(CacheEntry cacheEntry) { @@ -1665,7 +1669,7 @@ private void postExecutionCacheActions() throws Exception { } } - private void execute() throws CommandProcessorResponse { + private void execute() throws CommandProcessorException { PerfLogger perfLogger = SessionState.getPerfLogger(); perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DRIVER_EXECUTE); @@ -1691,7 +1695,7 @@ private void execute() throws CommandProcessorResponse { if (!driverState.isCompiled() && !driverState.isExecuting()) { String errorMessage = "FAILED: 
unexpected driverstate: " + driverState + ", for query " + queryStr; console.printError(errorMessage); - throw createProcessorResponse(1000, errorMessage, "HY008", null); + throw createProcessorException(1000, errorMessage, "HY008", null); } else { driverState.executing(); } @@ -1857,7 +1861,7 @@ private void execute() throws CommandProcessorResponse { // in case we decided to run everything in local mode, restore the // the jobtracker setting to its initial value ctx.restoreOriginalTracker(); - throw createProcessorResponse(exitVal, errorMessage, sqlState, result.getTaskError()); + throw createProcessorException(exitVal, errorMessage, sqlState, result.getTaskError()); } } @@ -1889,7 +1893,7 @@ private void execute() throws CommandProcessorResponse { String errorMessage = "FAILED: Operation cancelled"; invokeFailureHooks(perfLogger, hookContext, errorMessage, null); console.printError(errorMessage); - throw createProcessorResponse(1000, errorMessage, "HY008", null); + throw createProcessorException(1000, errorMessage, "HY008", null); } // remove incomplete outputs. @@ -1917,9 +1921,9 @@ private void execute() throws CommandProcessorResponse { SessionState.get().getHiveHistory().printRowCount(queryId); } releasePlan(plan); - } catch (CommandProcessorResponse cpr) { + } catch (CommandProcessorException cpe) { executionError = true; - throw cpr; + throw cpe; } catch (Throwable e) { executionError = true; @@ -1940,7 +1944,7 @@ private void execute() throws CommandProcessorResponse { } } console.printError(errorMessage + "\n" + StringUtils.stringifyException(e)); - throw createProcessorResponse(12, errorMessage, "08S01", e); + throw createProcessorException(12, errorMessage, "08S01", e); } finally { // Trigger query hooks after query completes its execution. 
try { diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java new file mode 100644 index 0000000000..5de5c389a3 --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorException.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.processors; + +/** + * Exception thrown during command processing. 
+ */ +public class CommandProcessorException extends Exception { + private static final long serialVersionUID = 1L; + + private final int responseCode; + private final int hiveErrorCode; + private final String errorMessage; + private final String sqlState; + private final Throwable exception; + + public CommandProcessorException(int responseCode, int hiveErrorCode, String errorMessage, String sqlState, + Throwable exception) { + this.responseCode = responseCode; + this.hiveErrorCode = hiveErrorCode; + this.errorMessage = errorMessage; + this.sqlState = sqlState; + this.exception = exception; + } + + public int getResponseCode() { + return responseCode; + } + + public int getErrorCode() { + return hiveErrorCode; + } + + public String getErrorMessage() { + return errorMessage; + } + + public String getSqlState() { + return sqlState; + } + + public Throwable getException() { + return exception; + } + + @Override + public String toString() { + return "(responseCode = " + responseCode + ", errorMessage = " + errorMessage + ", " + + (hiveErrorCode > 0 ? "hiveErrorCode = " + hiveErrorCode + ", " : "" ) + + "SQLState = " + sqlState + + (exception == null ? "" : ", exception = " + exception.getMessage()) + ")"; + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java index 94cfa5178c..36ed87b08e 100644 --- ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java +++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java @@ -21,7 +21,6 @@ import java.util.List; import org.apache.hadoop.hive.metastore.api.Schema; -import org.apache.hadoop.hive.ql.ErrorMsg; /** * Encapsulates the basic response info returned by classes the implement the @@ -30,11 +29,11 @@ * is not 0. Note that often {@code responseCode} ends up the exit value of * command shell process so should keep it to < 127. 
*/ -public class CommandProcessorResponse extends Exception { +public class CommandProcessorResponse { private final int responseCode; private final String errorMessage; private final int hiveErrorCode; - private final String SQLState; + private final String sqlState; private final Schema resSchema; private final Throwable exception; @@ -44,73 +43,93 @@ public CommandProcessorResponse(int responseCode) { this(responseCode, null, null, null, null); } - public CommandProcessorResponse(int responseCode, String errorMessage, String SQLState) { - this(responseCode, errorMessage, SQLState, null, null); + public CommandProcessorResponse(int responseCode, String errorMessage, String sqlState) { + this(responseCode, errorMessage, sqlState, null, null); } public CommandProcessorResponse(int responseCode, List consoleMessages) { this(responseCode, null, null, null, null, -1, consoleMessages); } - public CommandProcessorResponse(int responseCode, String errorMessage, String SQLState, Throwable exception) { - this(responseCode, errorMessage, SQLState, null, exception); + public CommandProcessorResponse(int responseCode, String errorMessage, String sqlState, Throwable exception) { + this(responseCode, errorMessage, sqlState, null, exception); } - public CommandProcessorResponse(int responseCode, String errorMessage, String SQLState, Schema schema) { - this(responseCode, errorMessage, SQLState, schema, null); - } - public CommandProcessorResponse(int responseCode, ErrorMsg canonicalErrMsg, Throwable t, String ... msgArgs) { - this(responseCode, canonicalErrMsg.format(msgArgs), - canonicalErrMsg.getSQLState(), null, t, canonicalErrMsg.getErrorCode(), null); + public CommandProcessorResponse(int responseCode, String errorMessage, String sqlState, Schema schema) { + this(responseCode, errorMessage, sqlState, schema, null); } /** * Create CommandProcessorResponse object indicating an error. 
- * Creates new CommandProcessorResponse with responseCode=1, and sets message - * from exception argument - * - * @param e - * @return + * Creates new CommandProcessorResponse with responseCode=1, and sets message from exception argument */ public static CommandProcessorResponse create(Exception e) { return new CommandProcessorResponse(1, e.getMessage(), null); } - public CommandProcessorResponse(int responseCode, String errorMessage, String SQLState, - Schema schema, Throwable exception) { - this(responseCode, errorMessage, SQLState, schema, exception, -1, null); + public static CommandProcessorResponse createSuccessful(Schema schema) { + return new CommandProcessorResponse(0, null, null, schema, null, -1, null); + } + + public static CommandProcessorResponse createFailure(CommandProcessorException cpe, Schema schema) { + return new CommandProcessorResponse(cpe.getResponseCode(), cpe.getErrorMessage(), cpe.getSqlState(), schema, + cpe.getException(), cpe.getErrorCode(), null); } - public CommandProcessorResponse(int responseCode, String errorMessage, String SQLState, - Schema schema, Throwable exception, int hiveErrorCode, List consoleMessages) { + + public CommandProcessorResponse(int responseCode, String errorMessage, String sqlState, Schema schema, + Throwable exception) { + this(responseCode, errorMessage, sqlState, schema, exception, -1, null); + } + + public CommandProcessorResponse(int responseCode, String errorMessage, String sqlState, Schema schema, + Throwable exception, int hiveErrorCode, List consoleMessages) { this.responseCode = responseCode; this.errorMessage = errorMessage; - this.SQLState = SQLState; + this.sqlState = sqlState; this.resSchema = schema; this.exception = exception; this.hiveErrorCode = hiveErrorCode; this.consoleMessages = consoleMessages; } - public int getResponseCode() { return responseCode; } - public String getErrorMessage() { return errorMessage; } - public String getSQLState() { return SQLState; } - public Schema getSchema() 
{ return resSchema; } - public Throwable getException() { return exception; } + public int getResponseCode() { + return responseCode; + } + + public boolean failed() { + return responseCode != 0; + } + + public String getErrorMessage() { + return errorMessage; + } + + public int getErrorCode() { + return hiveErrorCode; + } + + public String getSQLState() { + return sqlState; + } + + public Schema getSchema() { + return resSchema; + } + + public Throwable getException() { + return exception; + } public List getConsoleMessages() { return consoleMessages; } - public int getErrorCode() { return hiveErrorCode; } + @Override public String toString() { return "(responseCode = " + responseCode + ", errorMessage = " + errorMessage + ", " + (hiveErrorCode > 0 ? "hiveErrorCode = " + hiveErrorCode + ", " : "" ) + - "SQLState = " + SQLState + + "SQLState = " + sqlState + (resSchema == null ? "" : ", resSchema = " + resSchema) + (exception == null ? "" : ", exception = " + exception.getMessage()) + ")"; } - - public boolean failed() { - return responseCode != 0; - } } diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java index 5c760e883c..6d251a6e8b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java @@ -80,6 +80,7 @@ import org.apache.hadoop.hive.ql.plan.MapWork; import org.apache.hadoop.hive.ql.plan.PlanUtils; import org.apache.hadoop.hive.ql.plan.TezWork; +import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.UDFType; @@ -94,7 +95,6 @@ import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.SplitLocationInfo; -import 
org.apache.hadoop.registry.client.binding.RegistryUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -340,7 +340,7 @@ public PlanFragment createPlanFragment(String query, int num, ApplicationId spli // The read will have READ_COMMITTED level semantics. try { driver.lockAndRespond(); - } catch (CommandProcessorResponse cpr1) { + } catch (CommandProcessorException cpr1) { throw new HiveException("Failed to acquire locks", cpr1); } diff --git ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java index a9917cf109..40c24cef09 100644 --- ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java +++ ql/src/test/org/apache/hadoop/hive/ql/TestCompileLock.java @@ -35,6 +35,7 @@ import org.apache.hadoop.hive.common.metrics.common.MetricsFactory; import org.apache.hadoop.hive.common.metrics.metrics2.CodahaleMetrics; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; @@ -289,7 +290,7 @@ public void testParallelCompilationWithMultipleQuotasAndClientSessionConcurrency } catch (ExecutionException ex) { responseList.add( - (ex.getCause() instanceof CommandProcessorResponse) ? + (ex.getCause() instanceof CommandProcessorException) ? 
new CommandProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode()) : new CommandProcessorResponse(CONCURRENT_COMPILATION)); } diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java index e4535ca6e1..c81ab2d6a7 100644 --- ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java +++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java @@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.ddl.DDLTask; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc; -import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.processors.CommandProcessorException; import org.apache.hadoop.hive.ql.session.SessionState; import org.junit.Assert; import org.junit.Test; @@ -60,7 +60,7 @@ public void testDecimalType3() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("Decimal precision out of allowed range [1,38]")); @@ -76,7 +76,7 @@ public void testDecimalType4() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("Decimal precision out of allowed range [1,38]")); @@ -92,7 +92,7 @@ public void testDecimalType5() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse 
cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("Decimal scale must be less than or equal to precision")); @@ -108,7 +108,7 @@ public void testDecimalType6() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("extraneous input '-' expecting Number")); @@ -124,7 +124,7 @@ public void testDecimalType7() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("missing ) at ',' near ',' in column name or constraint")); @@ -140,7 +140,7 @@ public void testDecimalType8() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("mismatched input '7a' expecting Number near '('")); @@ -156,7 +156,7 @@ public void testDecimalType9() throws ParseException { Driver driver = createDriver(); try { driver.compile(query, true, false); - } catch (CommandProcessorResponse cpe) { + } catch (CommandProcessorException cpe) { Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", 
cpe.getResponseCode() != 0); Assert.assertTrue(cpe.getErrorMessage(), cpe.getErrorMessage().contains("Decimal scale must be less than or equal to precision")); diff --git ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestReOptimization.java ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestReOptimization.java index 4283c2275b..ee8c7c0bfc 100644 --- ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestReOptimization.java +++ ql/src/test/org/apache/hadoop/hive/ql/plan/mapping/TestReOptimization.java @@ -23,7 +23,6 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.Optional; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.hadoop.hive.conf.HiveConf; @@ -33,7 +32,6 @@ import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter; -import org.apache.hadoop.hive.ql.optimizer.signature.RelTreeSignature; import org.apache.hadoop.hive.ql.plan.Statistics; import org.apache.hadoop.hive.ql.plan.mapper.PlanMapper; import org.apache.hadoop.hive.ql.plan.mapper.StatsSources; @@ -49,6 +47,7 @@ import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import org.junit.rules.TestRule; public class TestReOptimization { @@ -99,10 +98,10 @@ public static void dropTables(IDriver driver) throws Exception { } } - private PlanMapper getMapperForQuery(IDriver driver, String query) throws CommandProcessorResponse { + private PlanMapper getMapperForQuery(IDriver driver, String query) { CommandProcessorResponse res = driver.run(query); if (res.getResponseCode() != 0) { - throw res; + throw new RuntimeException("running the query " + query + " was not successful"); } PlanMapper pm0 = driver.getContext().getPlanMapper(); return pm0; @@ -152,14 +151,19 @@ public void testReExecutedIfMapJoinError() throws Exception { } - 
@Test(expected = CommandProcessorResponse.class) + @Rule + public ExpectedException exceptionRule = ExpectedException.none(); + + @Test public void testNotReExecutedIfAssertionError() throws Exception { IDriver driver = createDriver("reoptimize"); String query = "select assert_true(${hiveconf:zzz}>sum(1)) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1"; + exceptionRule.expect(RuntimeException.class); + exceptionRule.expectMessage("running the query " + query + " was not successful"); + getMapperForQuery(driver, query); - assertEquals(1, driver.getContext().getExecutionIndex()); } @Test @@ -192,7 +196,7 @@ public void testStatCachingMetaStore() throws Exception { private void checkRuntimeStatsReuse( boolean expectInSameSession, boolean expectNewHs2Session, - boolean expectHs2Instance) throws CommandProcessorResponse { + boolean expectHs2Instance) { { // same session IDriver driver = createDriver("reoptimize"); @@ -216,7 +220,7 @@ private void checkRuntimeStatsReuse( } @SuppressWarnings("rawtypes") - private void checkUsageOfRuntimeStats(IDriver driver, boolean expected) throws CommandProcessorResponse { + private void checkUsageOfRuntimeStats(IDriver driver, boolean expected) { String query = "select sum(u) from tu join tv on (tu.id_uv=tv.id_uv) where u<10 and v>1"; PlanMapper pm = getMapperForQuery(driver, query); assertEquals(1, driver.getContext().getExecutionIndex()); @@ -272,8 +276,6 @@ public void testReOptimizationCanSendBackStatsToCBO() throws Exception { FilterOperator fo = fos.get(0); OperatorStats os = oss.get(0); - Optional prevOs = driver.getContext().getStatsSource().lookup(RelTreeSignature.of(hf)); - long cntFilter = RelMetadataQuery.instance().getRowCount(hf).longValue(); if (fo.getStatistics() != null) { // in case the join order is changed the subTree-s are not matching anymore because an RS is present in the condition