diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 64375c159c..1c301e8d6d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -150,9 +150,6 @@
   private DriverContext driverCxt;
   private QueryPlan plan;
   private Schema schema;
-  private String errorMessage;
-  private String SQLState;
-  private Throwable downstreamError;

   private FetchTask fetchTask;
   private List<HiveLock> hiveLocks = new ArrayList<HiveLock>();
@@ -356,7 +353,7 @@ public int compile(String command, boolean resetTaskIds) {
   // interrupted, it should be set to true if the compile is called within another method like
   // runInternal, which defers the close to the called in that method.
   @VisibleForTesting
-  void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorResponse {
+  public void compile(String command, boolean resetTaskIds, boolean deferClose) throws CommandProcessorResponse {
     PerfLogger perfLogger = SessionState.getPerfLogger();
     perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.COMPILE);
     driverState.lock();
@@ -568,11 +565,8 @@ public void run() {
         CommandAuthorizer.doAuthorization(queryState.getHiveOperation(), sem, command);
       }
     } catch (AuthorizationException authExp) {
-      console.printError("Authorization failed:" + authExp.getMessage()
-          + ". Use SHOW GRANT to get more details.");
-      errorMessage = authExp.getMessage();
-      SQLState = "42000";
-      throw createProcessorResponse(403);
+      console.printError("Authorization failed:" + authExp.getMessage() + ". Use SHOW GRANT to get more details.");
+      throw createProcessorResponse(403, authExp.getMessage(), "42000", null);
     } finally {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
     }
@@ -598,7 +592,7 @@ public void run() {
       compileError = true;
       ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
-      errorMessage = "FAILED: " + e.getClass().getSimpleName();
+      String errorMessage = "FAILED: " + e.getClass().getSimpleName();
       if (error != ErrorMsg.GENERIC_ERROR) {
         errorMessage += " [Error " + error.getErrorCode() + "]:";
       }
@@ -614,11 +608,9 @@ public void run() {
         errorMessage += ". Failed command: " + queryStr;
       }
-      SQLState = error.getSQLState();
-      downstreamError = e;
       console.printError(errorMessage,
           "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      throw createProcessorResponse(error.getErrorCode());
+      throw createProcessorResponse(error.getErrorCode(), errorMessage, error.getSQLState(), e);
     } finally {
       // Trigger post compilation hook. Note that if the compilation fails here then
       // before/after execution hook will never be executed.
@@ -784,14 +776,9 @@ private void setLastReplIdForDump(HiveConf conf) throws HiveException, TExceptio
   }

   private void openTransaction() throws LockException, CommandProcessorResponse {
-    if (checkConcurrency() && startImplicitTxn(queryTxnMgr)) {
+    if (checkConcurrency() && startImplicitTxn(queryTxnMgr) && !queryTxnMgr.isTxnOpen()) {
       String userFromUGI = getUserFromUGI();
-      if (!queryTxnMgr.isTxnOpen()) {
-        if (userFromUGI == null) {
-          throw createProcessorResponse(10);
-        }
-        queryTxnMgr.openTxn(ctx, userFromUGI);
-      }
+      queryTxnMgr.openTxn(ctx, userFromUGI);
     }
   }
@@ -847,24 +834,18 @@ private boolean startImplicitTxn(HiveTxnManager txnManager) throws LockException
     return shouldOpenImplicitTxn;
   }

-  private int handleInterruptionWithHook(String msg, HookContext hookContext,
-      PerfLogger perfLogger) {
-    SQLState = "HY008"; //SQLState for cancel operation
-    errorMessage = "FAILED: command has been interrupted: " + msg;
-    console.printError(errorMessage);
-    if (hookContext != null) {
-      try {
-        invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
-      } catch (Exception e) {
-        LOG.warn("Caught exception attempting to invoke Failure Hooks", e);
-      }
-    }
-    return 1000;
-  }
-
   private void checkInterrupted(String msg, HookContext hookContext, PerfLogger perfLogger) throws CommandProcessorResponse {
     if (driverState.isAborted()) {
-      throw createProcessorResponse(handleInterruptionWithHook(msg, hookContext, perfLogger));
+      String errorMessage = "FAILED: command has been interrupted: " + msg;
+      console.printError(errorMessage);
+      if (hookContext != null) {
+        try {
+          invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
+        } catch (Exception e) {
+          LOG.warn("Caught exception attempting to invoke Failure Hooks", e);
+        }
+      }
+      throw createProcessorResponse(1000, errorMessage, "HY008", null);
     }
   }
@@ -1071,19 +1052,16 @@ private void addTableFromEntity(Entity entity, Collection<Table> tableList) {
     return result;
   }

-  private String getUserFromUGI() {
+  private String getUserFromUGI() throws CommandProcessorResponse {
    // Don't use the userName member, as it may or may not have been set. Get the value from
    // conf, which calls into getUGI to figure out who the process is running as.
    try {
      return conf.getUser();
    } catch (IOException e) {
-      errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage();
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
-      downstreamError = e;
-      console.printError(errorMessage,
-          "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      String errorMessage = "FAILED: Error in determining user while acquiring locks: " + e.getMessage();
+      console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      throw createProcessorResponse(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e);
    }
-    return null;
  }

  /**
@@ -1108,9 +1086,6 @@ private void acquireLocks() throws CommandProcessorResponse {
    }
    try {
      String userFromUGI = getUserFromUGI();
-      if(userFromUGI == null) {
-        throw createProcessorResponse(10);
-      }

      // Set the table write id in all of the acid file sinks
      if (!plan.getAcidSinks().isEmpty()) {
@@ -1176,12 +1151,9 @@
      }
    } catch (Exception e) {
-      errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
-      downstreamError = e;
-      console.printError(errorMessage, "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      throw createProcessorResponse(10);
+      String errorMessage = "FAILED: Error in acquiring locks: " + e.getMessage();
+      console.printError(errorMessage, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      throw createProcessorResponse(10, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e);
    } finally {
      perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ACQUIRE_READ_WRITE_LOCKS);
    }

@@ -1266,59 +1238,51 @@ public CommandProcessorResponse run(String command, boolean alreadyCompiled) {
    try {
      runInternal(command, alreadyCompiled);
-      return createProcessorResponse(0);
+      return createProcessorResponse(0, null, null, null);
    } catch (CommandProcessorResponse cpr) {
-
-      SessionState ss = SessionState.get();
-      if(ss == null) {
-        return cpr;
-      }
-      MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
-      if(!(mdf instanceof JsonMetaDataFormatter)) {
-        return cpr;
-      }
-      /*Here we want to encode the error in machine readable way (e.g. JSON)
-       * Ideally, errorCode would always be set to a canonical error defined in ErrorMsg.
-       * In practice that is rarely the case, so the messy logic below tries to tease
-       * out canonical error code if it can. Exclude stack trace from output when
-       * the error is a specific/expected one.
-       * It's written to stdout for backward compatibility (WebHCat consumes it).*/
-      try {
-        if(downstreamError == null) {
-          mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState);
+      SessionState ss = SessionState.get();
+      if (ss == null) {
        return cpr;
      }
-        ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode());
-        if(canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
-          /*Some HiveExceptions (e.g. SemanticException) don't set
-            canonical ErrorMsg explicitly, but there is logic
-            (e.g. #compile()) to find an appropriate canonical error and
-            return its code as error code. In this case we want to
-            preserve it for downstream code to interpret*/
-          mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState, null);
+      MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
+      if (!(mdf instanceof JsonMetaDataFormatter)) {
        return cpr;
      }
-        if(downstreamError instanceof HiveException) {
-          HiveException rc = (HiveException) downstreamError;
-          mdf.error(ss.out, errorMessage,
-              rc.getCanonicalErrorMsg().getErrorCode(), SQLState,
-              rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ?
-                  org.apache.hadoop.util.StringUtils.stringifyException(rc) : null);
-        }
-        else {
-          ErrorMsg canonicalMsg =
-              ErrorMsg.getErrorMsg(downstreamError.getMessage());
-          mdf.error(ss.out, errorMessage, canonicalMsg.getErrorCode(),
-              SQLState, org.apache.hadoop.util.StringUtils.
-              stringifyException(downstreamError));
+      /*Here we want to encode the error in machine readable way (e.g. JSON)
+       * Ideally, errorCode would always be set to a canonical error defined in ErrorMsg.
+       * In practice that is rarely the case, so the messy logic below tries to tease
+       * out canonical error code if it can. Exclude stack trace from output when
+       * the error is a specific/expected one.
+       * It's written to stdout for backward compatibility (WebHCat consumes it).*/
+      try {
+        if (cpr.getException() == null) {
+          mdf.error(ss.out, cpr.getErrorMessage(), cpr.getResponseCode(), cpr.getSQLState());
+          return cpr;
+        }
+        ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode());
+        if (canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
+          /*Some HiveExceptions (e.g. SemanticException) don't set
+            canonical ErrorMsg explicitly, but there is logic
+            (e.g. #compile()) to find an appropriate canonical error and
+            return its code as error code. In this case we want to
+            preserve it for downstream code to interpret*/
+          mdf.error(ss.out, cpr.getErrorMessage(), cpr.getResponseCode(), cpr.getSQLState(), null);
+          return cpr;
+        }
+        if (cpr.getException() instanceof HiveException) {
+          HiveException rc = (HiveException)cpr.getException();
+          mdf.error(ss.out, cpr.getErrorMessage(), rc.getCanonicalErrorMsg().getErrorCode(), cpr.getSQLState(),
+              rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ?
+                  org.apache.hadoop.util.StringUtils.stringifyException(rc) : null);
+        } else {
+          ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(cpr.getException().getMessage());
+          mdf.error(ss.out, cpr.getErrorMessage(), canonicalMsg.getErrorCode(), cpr.getSQLState(),
+              org.apache.hadoop.util.StringUtils.stringifyException(cpr.getException()));
+        }
+      } catch (HiveException ex) {
+        console.printError("Unable to JSON-encode the error", org.apache.hadoop.util.StringUtils.stringifyException(ex));
      }
-      }
-      catch(HiveException ex) {
-        console.printError("Unable to JSON-encode the error",
-            org.apache.hadoop.util.StringUtils.stringifyException(ex));
-      }
-      return cpr;
+      return cpr;
    }
  }
@@ -1330,7 +1294,7 @@ public CommandProcessorResponse compileAndRespond(String command) {
  public CommandProcessorResponse compileAndRespond(String command, boolean cleanupTxnList) {
    try {
      compileInternal(command, false);
-      return createProcessorResponse(0);
+      return createProcessorResponse(0, null, null, null);
    } catch (CommandProcessorResponse e) {
      return e;
    } finally {
@@ -1377,8 +1341,8 @@ private void compileInternal(String command, boolean deferClose) throws CommandP
      metrics.decrementCounter(MetricsConstant.WAITING_COMPILE_OPS, 1);
    }
    if (!success) {
-      errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
-      throw createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode());
+      String errorMessage = ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCodedMsg();
+      throw createProcessorResponse(ErrorMsg.COMPILE_LOCK_TIMED_OUT.getErrorCode(), errorMessage, null, null);
    }

    try {
@@ -1400,9 +1364,6 @@ private void compileInternal(String command, boolean deferClose) throws CommandP
  }

  private void runInternal(String command, boolean alreadyCompiled) throws CommandProcessorResponse {
-    errorMessage = null;
-    SQLState = null;
-    downstreamError = null;
    DriverState.setDriverState(driverState);

    driverState.lock();
@@ -1411,9 +1372,9 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command
      if (driverState.isCompiled()) {
        driverState.executing();
      } else {
-        errorMessage = "FAILED: Precompiled query has been cancelled or closed.";
+        String errorMessage = "FAILED: Precompiled query has been cancelled or closed.";
        console.printError(errorMessage);
-        throw createProcessorResponse(12);
+        throw createProcessorResponse(12, errorMessage, null, null);
      }
    } else {
      driverState.compiling();
@@ -1432,12 +1393,10 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command
    try {
      hookRunner.runPreDriverHooks(hookContext);
    } catch (Exception e) {
-      errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
-      downstreamError = e;
+      String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
      console.printError(errorMessage + "\n"
          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      throw createProcessorResponse(12);
+      throw createProcessorResponse(12, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e);
    }

    if (!alreadyCompiled) {
@@ -1537,12 +1496,10 @@ else if(plan.getOperation() == HiveOperation.ROLLBACK) {
      try {
        hookRunner.runPostDriverHooks(hookContext);
      } catch (Exception e) {
-        errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
-        SQLState = ErrorMsg.findSQLState(e.getMessage());
-        downstreamError = e;
+        String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
        console.printError(errorMessage + "\n" +
            org.apache.hadoop.util.StringUtils.stringifyException(e));
-        throw createProcessorResponse(12);
+        throw createProcessorResponse(12, errorMessage, ErrorMsg.findSQLState(e.getMessage()), e);
      }
      isFinishedWithError = false;
    } finally {
@@ -1580,16 +1537,15 @@ private CommandProcessorResponse handleHiveException(HiveException e, int ret) t
  }

  private CommandProcessorResponse handleHiveException(HiveException e, int ret, String rootMsg) throws CommandProcessorResponse {
-    errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+    String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
    if(rootMsg != null) {
      errorMessage += "\n" + rootMsg;
    }
-    SQLState = e.getCanonicalErrorMsg() != null ?
+    String SQLState = e.getCanonicalErrorMsg() != null ?
        e.getCanonicalErrorMsg().getSQLState() : ErrorMsg.findSQLState(e.getMessage());
-    downstreamError = e;
    console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
-    throw createProcessorResponse(ret);
+    throw createProcessorResponse(ret, errorMessage, SQLState, e);
  }

  private boolean requiresLock() {
    if (!checkConcurrency()) {
@@ -1637,7 +1593,8 @@ private boolean isExplicitLockOperation() {
    return false;
  }

-  private CommandProcessorResponse createProcessorResponse(int ret) {
+  private CommandProcessorResponse createProcessorResponse(int ret, String errorMessage, String SQLState,
+      Throwable downstreamError) {
    SessionState.getPerfLogger().cleanupPerfLogMetrics();
    queryDisplay.setErrorMessage(errorMessage);
    if(downstreamError != null && downstreamError instanceof HiveException) {
@@ -1737,11 +1694,10 @@ private void execute() throws CommandProcessorResponse {
    try {
      // if query is not in compiled state, or executing state which is carried over from
      // a combined compile/execute in runInternal, throws the error
-      if (driverState.isCompiled() && driverState.isExecuting()) {
-        SQLState = "HY008";
-        errorMessage = "FAILED: unexpected driverstate: " + driverState + ", for query " + queryStr;
+      if (!driverState.isCompiled() && !driverState.isExecuting()) {
+        String errorMessage = "FAILED: unexpected driverstate: " + driverState + ", for query " + queryStr;
        console.printError(errorMessage);
-        throw createProcessorResponse(1000);
+        throw createProcessorResponse(1000, errorMessage, "HY008", null);
      } else {
        driverState.executing();
      }
@@ -1872,7 +1828,7 @@ private void execute() throws CommandProcessorResponse {
        if (exitVal != 0) {
          Task<? extends Serializable> backupTask = tsk.getAndInitBackupTask();
          if (backupTask != null) {
-            setErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
+            String errorMessage = getErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
            console.printError(errorMessage);
            errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName();
            console.printError(errorMessage);
@@ -1884,13 +1840,13 @@ private void execute() throws CommandProcessorResponse {
            continue;

          } else {
-            setErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
+            String errorMessage = getErrorMsgAndDetail(exitVal, result.getTaskError(), tsk);
            if (driverCxt.isShutdown()) {
              errorMessage = "FAILED: Operation cancelled. " + errorMessage;
            }
            invokeFailureHooks(perfLogger, hookContext,
                errorMessage + Strings.nullToEmpty(tsk.getDiagnosticsMessage()), result.getTaskError());
-            SQLState = "08S01";
+            String SQLState = "08S01";

            // 08S01 (Communication error) is the default sql state. Override the sqlstate
            // based on the ErrorMsg set in HiveException.
@@ -1907,7 +1863,7 @@ private void execute() throws CommandProcessorResponse {
            // in case we decided to run everything in local mode, restore the
            // the jobtracker setting to its initial value
            ctx.restoreOriginalTracker();
-            throw createProcessorResponse(exitVal);
+            throw createProcessorResponse(exitVal, errorMessage, SQLState, result.getTaskError());
          }
        }
@@ -1936,11 +1892,10 @@ private void execute() throws CommandProcessorResponse {
      ctx.restoreOriginalTracker();

      if (driverCxt.isShutdown()) {
-        SQLState = "HY008";
-        errorMessage = "FAILED: Operation cancelled";
+        String errorMessage = "FAILED: Operation cancelled";
        invokeFailureHooks(perfLogger, hookContext, errorMessage, null);
        console.printError(errorMessage);
-        throw createProcessorResponse(1000);
+        throw createProcessorResponse(1000, errorMessage, "HY008", null);
      }

      // remove incomplete outputs.
@@ -1982,7 +1937,7 @@ private void execute() throws CommandProcessorResponse {
            String.valueOf(12));
      }
      // TODO: do better with handling types of Exception here
-      errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
+      String errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
      if (hookContext != null) {
        try {
          invokeFailureHooks(perfLogger, hookContext, errorMessage, e);
@@ -1990,11 +1945,8 @@ private void execute() throws CommandProcessorResponse {
          LOG.warn("Failed to invoke failure hook", t);
        }
      }
-      SQLState = "08S01";
-      downstreamError = e;
-      console.printError(errorMessage + "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      throw createProcessorResponse(12);
+      console.printError(errorMessage + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      throw createProcessorResponse(12, errorMessage, "08S01", e);
    } finally {
      // Trigger query hooks after query completes its execution.
      try {
@@ -2111,10 +2063,9 @@ private void logMrWarning(int mrJobs) {
    LOG.warn(warning);
  }

-  private void setErrorMsgAndDetail(int exitVal, Throwable downstreamError, Task tsk) {
-    this.downstreamError = downstreamError;
-    errorMessage = "FAILED: Execution Error, return code " + exitVal + " from " + tsk.getClass().getName();
-    if(downstreamError != null) {
+  private String getErrorMsgAndDetail(int exitVal, Throwable downstreamError, Task tsk) {
+    String errorMessage = "FAILED: Execution Error, return code " + exitVal + " from " + tsk.getClass().getName();
+    if (downstreamError != null) {
      //here we assume that upstream code may have parametrized the msg from ErrorMsg
      //so we want to keep it
      if (downstreamError.getMessage() != null) {
@@ -2129,6 +2080,8 @@ private void setErrorMsgAndDetail(int exitVal, Throwable downstreamError, Task t
        errorMessage += ". " + em.getMsg();
      }
    }
+
+    return errorMessage;
  }

  private void invokeFailureHooks(PerfLogger perfLogger,
@@ -2455,11 +2408,6 @@ public void destroy() {
    ShutdownHookManager.removeShutdownHook(shutdownRunner);
  }

-  public String getErrorMsg() {
-    return errorMessage;
-  }
-
-
  @Override
  public QueryDisplay getQueryDisplay() {
    return queryDisplay;
diff --git ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
index 50967637e7..e4535ca6e1 100644
--- ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
+++ ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.hive.ql.ddl.DDLTask;
 import org.apache.hadoop.hive.ql.ddl.DDLWork;
 import org.apache.hadoop.hive.ql.ddl.table.creation.CreateTableDesc;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Assert;
 import org.junit.Test;
@@ -57,10 +58,15 @@ public void testDecimalType3() throws ParseException {
    String query = "create table `dec` (d decimal(66,7))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("Decimal precision out of allowed range [1,38]"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  @Test
@@ -68,10 +74,15 @@ public void testDecimalType4() throws ParseException {
    String query = "create table `dec` (d decimal(0,7))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("Decimal precision out of allowed range [1,38]"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  @Test
@@ -79,10 +90,15 @@ public void testDecimalType5() throws ParseException {
    String query = "create table `dec` (d decimal(7,33))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("Decimal scale must be less than or equal to precision"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  @Test
@@ -90,10 +106,15 @@ public void testDecimalType6() throws ParseException {
    String query = "create table `dec` (d decimal(7,-1))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("extraneous input '-' expecting Number"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("extraneous input '-' expecting Number"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  @Test
@@ -101,10 +122,15 @@ public void testDecimalType7() throws ParseException {
    String query = "create table `dec` (d decimal(7,33,4))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("missing ) at ',' near ',' in column name or constraint"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("missing ) at ',' near ',' in column name or constraint"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  @Test
@@ -112,10 +138,15 @@ public void testDecimalType8() throws ParseException {
    String query = "create table `dec` (d decimal(7a))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("mismatched input '7a' expecting Number near '('"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("mismatched input '7a' expecting Number near '('"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  @Test
@@ -123,10 +154,15 @@ public void testDecimalType9() throws ParseException {
    String query = "create table `dec` (d decimal(20,23))";

    Driver driver = createDriver();
-    int rc = driver.compile(query, true);
-    Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
-    Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
+    try {
+      driver.compile(query, true, false);
+    } catch (CommandProcessorResponse cpe) {
+      Assert.assertTrue("Got " + cpe.getResponseCode() + ", expected not zero", cpe.getResponseCode() != 0);
+      Assert.assertTrue(cpe.getErrorMessage(),
+          cpe.getErrorMessage().contains("Decimal scale must be less than or equal to precision"));
+      return;
+    }
+    Assert.assertTrue("Expected to receive an exception", false);
  }

  private Driver createDriver() {
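
Reviewer note, not part of the patch: with the errorMessage/SQLState/downstreamError fields removed, all error details now travel on the CommandProcessorResponse that compile() throws (and run() returns on failure). A minimal sketch of the new calling convention in Java, assuming a configured Driver instance named driver and a deliberately invalid query (both hypothetical; the pattern mirrors the updated tests above):

    try {
      driver.compile("create table t (d decimal(66,7))", true, false);
    } catch (CommandProcessorResponse cpe) {
      // Everything the removed Driver fields used to carry is on the response object.
      int rc = cpe.getResponseCode();         // non-zero error code
      String msg = cpe.getErrorMessage();     // the "FAILED: ..." console text
      String sqlState = cpe.getSQLState();    // e.g. "42000", "HY008", "08S01"
      Throwable cause = cpe.getException();   // the former downstreamError
    }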