Index: serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
===================================================================
--- serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (revision 919266)
+++ serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (working copy)
@@ -20,11 +20,15 @@
 import java.lang.reflect.Field;
 import java.lang.reflect.Modifier;
+import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory.ObjectInspectorOptions;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
@@ -37,6 +41,7 @@
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * ObjectInspectorFactory is the primary way to create new ObjectInspector
@@ -47,6 +52,8 @@
  */
 public final class ObjectInspectorUtils {
 
+  protected final static Log LOG = LogFactory.getLog(ObjectInspectorUtils.class.getName());
+
   /**
   * This enum controls how we copy primitive objects.
   *
@@ -568,6 +575,20 @@
     return sb.toString();
   }
 
+  /**
+   * Get the type name of the Java class.
+   */
+  public static String getTypeNameFromJavaClass(Type t) {
+    try {
+      ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(t,
+          ObjectInspectorOptions.JAVA);
+      return oi.getTypeName();
+    } catch (Throwable e) {
+      LOG.info(StringUtils.stringifyException(e));
+      return "unknown";
+    }
+  }
+
   private ObjectInspectorUtils() {
     // prevent instantiation
   }
Index: ql/src/test/results/clientnegative/fs_default_name2.q.out
===================================================================
--- ql/src/test/results/clientnegative/fs_default_name2.q.out (revision 919266)
+++ ql/src/test/results/clientnegative/fs_default_name2.q.out (working copy)
@@ -1 +1,44 @@
-FAILED: Unknown exception: Error while making MR scratch directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com)
+FAILED: Hive Internal Error: java.lang.RuntimeException(Error while making MR scratch directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com))
+java.lang.RuntimeException: Error while making MR scratch directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com)
+  at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:205)
+  at org.apache.hadoop.hive.ql.Context.getMRTmpFileURI(Context.java:281)
+  at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.getMetaData(SemanticAnalyzer.java:795)
+  at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:5912)
+  at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:128)
+  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:304)
+  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:377)
+  at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:138)
+  at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:197)
+  at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:502)
+  at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_fs_default_name2(TestNegativeCliDriver.java:1594)
+  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
+  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
+  at java.lang.reflect.Method.invoke(Method.java:597)
+  at junit.framework.TestCase.runTest(TestCase.java:154)
+  at junit.framework.TestCase.runBare(TestCase.java:127)
+  at junit.framework.TestResult$1.protect(TestResult.java:106)
+  at junit.framework.TestResult.runProtected(TestResult.java:124)
+  at junit.framework.TestResult.run(TestResult.java:109)
+  at junit.framework.TestCase.run(TestCase.java:118)
+  at junit.framework.TestSuite.runTest(TestSuite.java:208)
+  at junit.framework.TestSuite.run(TestSuite.java:203)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
+Caused by: java.lang.IllegalArgumentException
+  at java.net.URI.create(URI.java:842)
+  at org.apache.hadoop.fs.FileSystem.getDefaultUri(FileSystem.java:116)
+  at org.apache.hadoop.hive.common.FileUtils.makeQualified(FileUtils.java:56)
+  at org.apache.hadoop.hive.ql.Context.makeMRScratchDir(Context.java:123)
+  at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:199)
+  ... 25 more
+Caused by: java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com
+  at java.net.URI$Parser.fail(URI.java:2809)
+  at java.net.URI$Parser.checkChars(URI.java:2982)
+  at java.net.URI$Parser.checkChar(URI.java:2992)
+  at java.net.URI$Parser.parse(URI.java:3008)
+  at java.net.URI.<init>(URI.java:578)
+  at java.net.URI.create(URI.java:840)
+  ... 29 more
+
Index: ql/src/test/results/clientnegative/wrong_column_type.q.out
===================================================================
--- ql/src/test/results/clientnegative/wrong_column_type.q.out (revision 0)
+++ ql/src/test/results/clientnegative/wrong_column_type.q.out (revision 0)
@@ -0,0 +1,6 @@
+PREHOOK: query: CREATE TABLE dest1(a float)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE dest1(a float)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+FAILED: Error in semantic analysis: No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array<double>). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string)
Index: ql/src/test/results/clientnegative/no_matching_udf.q.out
===================================================================
--- ql/src/test/results/clientnegative/no_matching_udf.q.out (revision 0)
+++ ql/src/test/results/clientnegative/no_matching_udf.q.out (revision 0)
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: No matching method for class org.apache.hadoop.hive.ql.udf.UDAFPercentile with (double, double). Possible choices: _FUNC_(bigint, array<double>) _FUNC_(bigint, double)
Index: ql/src/test/results/clientnegative/fs_default_name1.q.out
===================================================================
--- ql/src/test/results/clientnegative/fs_default_name1.q.out (revision 919266)
+++ ql/src/test/results/clientnegative/fs_default_name1.q.out (working copy)
@@ -1 +1,44 @@
-FAILED: Unknown exception: Error while making local scratch directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com)
+FAILED: Hive Internal Error: java.lang.RuntimeException(Error while making local scratch directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com))
+java.lang.RuntimeException: Error while making local scratch directory - check filesystem config (java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com)
+  at org.apache.hadoop.hive.ql.Context.getLocalScratchDir(Context.java:222)
+  at org.apache.hadoop.hive.ql.Context.getLocalTmpFileURI(Context.java:290)
+  at org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.analyzeInternal(DDLSemanticAnalyzer.java:101)
+  at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:128)
+  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:304)
+  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:377)
+  at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:138)
+  at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:197)
+  at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:502)
+  at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_fs_default_name1(TestNegativeCliDriver.java:1564)
+  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
+  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
+  at java.lang.reflect.Method.invoke(Method.java:597)
+  at junit.framework.TestCase.runTest(TestCase.java:154)
+  at junit.framework.TestCase.runBare(TestCase.java:127)
+  at junit.framework.TestResult$1.protect(TestResult.java:106)
+  at junit.framework.TestResult.runProtected(TestResult.java:124)
+  at junit.framework.TestResult.run(TestResult.java:109)
+  at junit.framework.TestCase.run(TestCase.java:118)
+  at junit.framework.TestSuite.runTest(TestSuite.java:208)
+  at junit.framework.TestSuite.run(TestSuite.java:203)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
+Caused by: java.lang.IllegalArgumentException
+  at java.net.URI.create(URI.java:842)
+  at org.apache.hadoop.fs.FileSystem.getDefaultUri(FileSystem.java:116)
+  at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:196)
+  at org.apache.hadoop.fs.FileSystem.getLocal(FileSystem.java:179)
+  at org.apache.hadoop.hive.ql.Context.makeLocalScratchDir(Context.java:162)
+  at org.apache.hadoop.hive.ql.Context.getLocalScratchDir(Context.java:216)
+  ... 24 more
+Caused by: java.net.URISyntaxException: Illegal character in scheme name at index 0: 'http://www.example.com
+  at java.net.URI$Parser.fail(URI.java:2809)
+  at java.net.URI$Parser.checkChars(URI.java:2982)
+  at java.net.URI$Parser.checkChar(URI.java:2992)
+  at java.net.URI$Parser.parse(URI.java:3008)
+  at java.net.URI.<init>(URI.java:578)
+  at java.net.URI.create(URI.java:840)
+  ... 29 more
+
Index: ql/src/test/results/clientnegative/bad_exec_hooks.q.out
===================================================================
--- ql/src/test/results/clientnegative/bad_exec_hooks.q.out (revision 919266)
+++ ql/src/test/results/clientnegative/bad_exec_hooks.q.out (working copy)
@@ -1,2 +1,35 @@
 Pre Exec Hook Class not found:"org.this.is.a.bad.class"
-FAILED: Unknown exception : "org.this.is.a.bad.class"
+FAILED: Hive Internal Error: java.lang.ClassNotFoundException("org.this.is.a.bad.class")
+java.lang.ClassNotFoundException: "org.this.is.a.bad.class"
+  at java.net.URLClassLoader$1.run(URLClassLoader.java:200)
+  at java.security.AccessController.doPrivileged(Native Method)
+  at java.net.URLClassLoader.findClass(URLClassLoader.java:188)
+  at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
+  at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:276)
+  at java.lang.ClassLoader.loadClass(ClassLoader.java:251)
+  at java.lang.ClassLoader.loadClassInternal(ClassLoader.java:319)
+  at java.lang.Class.forName0(Native Method)
+  at java.lang.Class.forName(Class.java:247)
+  at org.apache.hadoop.hive.ql.Driver.getPreExecHooks(Driver.java:402)
+  at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:463)
+  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:382)
+  at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:138)
+  at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:197)
+  at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:502)
+  at org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_bad_exec_hooks(TestNegativeCliDriver.java:304)
+  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
+  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
+  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
+  at java.lang.reflect.Method.invoke(Method.java:597)
+  at junit.framework.TestCase.runTest(TestCase.java:154)
+  at junit.framework.TestCase.runBare(TestCase.java:127)
+  at junit.framework.TestResult$1.protect(TestResult.java:106)
+  at junit.framework.TestResult.runProtected(TestResult.java:124)
+  at junit.framework.TestResult.run(TestResult.java:109)
+  at junit.framework.TestCase.run(TestCase.java:118)
+  at junit.framework.TestSuite.runTest(TestSuite.java:208)
+  at junit.framework.TestSuite.run(TestSuite.java:203)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
+  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
+
Index: ql/src/test/results/compiler/errors/invalid_function_param2.q.out
===================================================================
--- ql/src/test/results/compiler/errors/invalid_function_param2.q.out (revision 919266)
+++ ql/src/test/results/compiler/errors/invalid_function_param2.q.out (working copy)
@@ -1,2 +1,2 @@
 Semantic Exception: 
-line 2:36 Function Argument Type Mismatch substr: Looking for UDF "substr" with parameters [class org.apache.hadoop.io.Text, class org.apache.hadoop.io.Text]
\ No newline at end of file
+line 2:36 Wrong Arguments 'abc': No matching method for class org.apache.hadoop.hive.ql.udf.UDFSubstr with (string, string). Possible choices: _FUNC_(string, int, int) _FUNC_(string, int)
\ No newline at end of file
Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (revision 919266)
+++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (working copy)
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -214,7 +215,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  private void populateMapRedPlan1(Table src) {
+  private void populateMapRedPlan1(Table src) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(1));
 
     ArrayList<String> outputColumns = new ArrayList<String>();
@@ -242,7 +243,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  private void populateMapRedPlan2(Table src) {
+  private void populateMapRedPlan2(Table src) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(1));
     ArrayList<String> outputColumns = new ArrayList<String>();
     for (int i = 0; i < 2; i++) {
@@ -275,7 +276,7 @@
    * test reduce with multiple tagged inputs.
    */
   @SuppressWarnings("unchecked")
-  private void populateMapRedPlan3(Table src, Table src2) {
+  private void populateMapRedPlan3(Table src, Table src2) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(5));
     mr.setNeedsTagging(true);
     ArrayList<String> outputColumns = new ArrayList<String>();
@@ -316,7 +317,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  private void populateMapRedPlan4(Table src) {
+  private void populateMapRedPlan4(Table src) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(1));
 
     // map-side work
@@ -359,7 +360,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  private void populateMapRedPlan5(Table src) {
+  private void populateMapRedPlan5(Table src) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(1));
 
     // map-side work
@@ -391,7 +392,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  private void populateMapRedPlan6(Table src) {
+  private void populateMapRedPlan6(Table src) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(1));
 
     // map-side work
Index: ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (revision 919266)
+++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (working copy)
@@ -753,6 +753,13 @@
         "-I", "lastAccessTime",
         "-I", "owner",
         "-I", "transient_lastDdlTime",
+        "-I", "java.lang.RuntimeException",
+        "-I", "at org",
+        "-I", "at sun",
+        "-I", "at java",
+        "-I", "at junit",
+        "-I", "Caused by:",
+        "-I", "[.][.][.] [0-9]* more",
         (new File(logDir, tname + ".out")).getPath(),
         (new File(outDir, tname + ".out")).getPath()};
@@ -828,7 +835,7 @@
         System.err.println("Query file " + fname + " failed with exception "
             + e.getMessage());
         e.printStackTrace();
-        System.err.flush();
+        outputTestFailureHelpMessage();
       }
     }
   }
@@ -890,6 +897,7 @@
         failed = true;
         System.err.println("Test " + qfiles[i].getName()
             + " results check failed with error code " + ecode);
+        outputTestFailureHelpMessage();
       }
     }
@@ -903,6 +911,7 @@
         failed = true;
         System.err.println("Test " + qfiles[i].getName()
            + " results check failed with error code " + ecode);
+        outputTestFailureHelpMessage();
       }
     }
   }
@@ -912,4 +921,10 @@
     }
     return (!failed);
   }
+
+  public static void outputTestFailureHelpMessage() {
+    System.err.println("See build/ql/tmp/hive.log, "
+        + "or try \"ant test ... -Dtest.silent=false\" to get more logs.");
+    System.err.flush();
+  }
 }
Index: ql/src/test/queries/clientnegative/no_matching_udf.q
===================================================================
--- ql/src/test/queries/clientnegative/no_matching_udf.q (revision 0)
+++ ql/src/test/queries/clientnegative/no_matching_udf.q (revision 0)
@@ -0,0 +1 @@
+SELECT percentile(3.5, 0.99) FROM src;
Index: ql/src/test/queries/clientnegative/wrong_column_type.q
===================================================================
--- ql/src/test/queries/clientnegative/wrong_column_type.q (revision 0)
+++ ql/src/test/queries/clientnegative/wrong_column_type.q (revision 0)
@@ -0,0 +1,4 @@
+CREATE TABLE dest1(a float);
+
+INSERT OVERWRITE TABLE dest1
+SELECT array(1.0,2.0) FROM src;
Index: ql/src/test/templates/TestCliDriver.vm
===================================================================
--- ql/src/test/templates/TestCliDriver.vm (revision 919266)
+++ ql/src/test/templates/TestCliDriver.vm (working copy)
@@ -71,6 +71,9 @@
     return suite;
   }
 
+  static String debugHint = "\nSee build/ql/tmp/hive.log, "
+      + "or try \"ant test ... -Dtest.silent=false\" to get more logs.";
+
 #foreach ($qf in $qfiles)
   #set ($fname = $qf.getName())
   #set ($eidx = $fname.length() - 2)
@@ -84,7 +87,7 @@
       qt.cliInit("$fname");
       int ecode = qt.executeClient("$fname");
       if (ecode != 0) {
-        fail("Client Execution failed with error code = " + ecode);
+        fail("Client Execution failed with error code = " + ecode + debugHint);
       }
       if (SessionState.get() != null) {
         HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get()
@@ -97,21 +100,22 @@
           QueryInfo ji = jobInfoMap.get(cmd);
 
           if (!ji.hm.get(Keys.QUERY_RET_CODE.name()).equals("0")) {
-            fail("Wrong return code in hive history");
+            fail("Wrong return code in hive history" + debugHint);
           }
         }
       }
 
       ecode = qt.checkCliDriverResults("$fname");
       if (ecode != 0) {
-        fail("Client execution results failed with error code = " + ecode);
+        fail("Client execution results failed with error code = " + ecode
+            + debugHint);
       }
     } catch (Throwable e) {
       System.out.println("Exception: " + e.getMessage());
       e.printStackTrace();
       System.out.flush();
-      fail("Unexpected exception");
+      fail("Unexpected exception" + debugHint);
     }
 
     System.out.println("Done query: " + "$fname");
Index: ql/src/test/templates/TestNegativeCliDriver.vm
===================================================================
--- ql/src/test/templates/TestNegativeCliDriver.vm (revision 919266)
+++ ql/src/test/templates/TestNegativeCliDriver.vm (working copy)
@@ -46,6 +46,9 @@
     return suite;
   }
 
+  static String debugHint = "\nSee build/ql/tmp/hive.log, "
+      + "or try \"ant test ... -Dtest.silent=false\" to get more logs.";
+
 #foreach ($qf in $qfiles)
   #set ($fname = $qf.getName())
   #set ($eidx = $fname.length() - 2)
@@ -59,19 +62,21 @@
       qt.cliInit("$fname");
       int ecode = qt.executeClient("$fname");
       if (ecode == 0) {
-        fail("Client Execution failed with error code = " + ecode);
+        fail("Client Execution failed with error code = " + ecode
+            + debugHint);
      }
 
      ecode = qt.checkCliDriverResults("$fname");
      if (ecode != 0) {
-        fail("Client execution results failed with error code = " + ecode);
+        fail("Client execution results failed with error code = " + ecode
+            + debugHint);
      }
     } catch (Throwable e) {
       System.out.println("Exception: " + e.getMessage());
       e.printStackTrace();
       System.out.flush();
-      fail("Unexpected exception");
+      fail("Unexpected exception" + debugHint);
     }
 
     System.out.println("Done query: " + "$fname");
Index: ql/src/test/templates/TestParse.vm
===================================================================
--- ql/src/test/templates/TestParse.vm (revision 919266)
+++ ql/src/test/templates/TestParse.vm (working copy)
@@ -43,6 +43,9 @@
     return suite;
   }
 
+  static String debugHint = "\nSee build/ql/tmp/hive.log, "
+      + "or try \"ant test ... -Dtest.silent=false\" to get more logs.";
+
 #foreach ($qf in $qfiles)
   #set ($fname = $qf.getName())
   #set ($eidx = $fname.length() - 2)
@@ -57,12 +60,13 @@
       ASTNode tree = qt.parseQuery("$fname");
       int ecode = qt.checkParseResults("$fname", tree);
       if (ecode != 0) {
-        fail("Parse has unexpected out with error code = " + ecode);
+        fail("Parse has unexpected out with error code = " + ecode + debugHint);
       }
       List<Task<? extends Serializable>> tasks = qt.analyzeAST(tree);
       ecode = qt.checkPlan("$fname", tasks);
       if (ecode != 0) {
-        fail("Semantic Analysis has unexpected output with error code = " + ecode);
+        fail("Semantic Analysis has unexpected output with error code = " + ecode
+            + debugHint);
       }
       System.out.println("Done query: " + "$fname");
     }
@@ -70,7 +74,7 @@
       System.out.println("Exception: " + e.getMessage());
       e.printStackTrace();
       System.out.flush();
-      fail("Unexpected exception");
+      fail("Unexpected exception" + debugHint);
     }
 
     assertTrue("Test passed", true);
Index: ql/src/test/templates/TestParseNegative.vm
===================================================================
--- ql/src/test/templates/TestParseNegative.vm (revision 919266)
+++ ql/src/test/templates/TestParseNegative.vm (working copy)
@@ -43,6 +43,9 @@
     return suite;
   }
 
+  static String debugHint = "\nSee build/ql/tmp/hive.log, "
+      + "or try \"ant test ... -Dtest.silent=false\" to get more logs.";
+
 #foreach ($qf in $qfiles)
   #set ($fname = $qf.getName())
   #set ($eidx = $fname.length() - 2)
@@ -56,25 +59,25 @@
       qt.init("$fname");
       ASTNode tree = qt.parseQuery("$fname");
       List<Task<? extends Serializable>> tasks = qt.analyzeAST(tree);
-      fail("Unexpected success for query: " + "$fname");
+      fail("Unexpected success for query: " + "$fname" + debugHint);
     }
     catch (ParseException pe) {
       int ecode = qt.checkNegativeResults("$fname", pe);
       if (ecode != 0) {
-        fail("failed with error code = " + ecode);
+        fail("failed with error code = " + ecode + debugHint);
       }
     }
     catch (SemanticException se) {
       int ecode = qt.checkNegativeResults("$fname", se);
       if (ecode != 0) {
-        fail("failed with error code = " + ecode);
+        fail("failed with error code = " + ecode + debugHint);
      }
    }
    catch (Throwable e) {
      System.out.println("Exception: " + e.getMessage());
      e.printStackTrace();
      System.out.flush();
-      fail("Unexpected exception");
+      fail("Unexpected exception" + debugHint);
    }
 
    System.out.println("Done query: " + "$fname");
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy)
@@ -37,6 +37,7 @@
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.Node;
@@ -129,7 +130,7 @@
   }
 
   private void createMergeJob(FileSinkOperator fsOp, GenMRProcContext ctx,
-      String finalName) {
+      String finalName) throws SemanticException {
     Task<? extends Serializable> currTask = ctx.getCurrTask();
     RowSchema fsRS = fsOp.getSchema();
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/OpProcFactory.java (working copy)
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
@@ -97,7 +98,7 @@
   }
 
   private void addPruningPred(Map<TableScanOperator, ExprNodeDesc> opToPPR,
-      TableScanOperator top, ExprNodeDesc new_ppr_pred) {
+      TableScanOperator top, ExprNodeDesc new_ppr_pred) throws UDFArgumentException {
     ExprNodeDesc old_ppr_pred = opToPPR.get(top);
     ExprNodeDesc ppr_pred = null;
     if (old_ppr_pred != null) {
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/NoMatchingMethodException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/NoMatchingMethodException.java (revision 0)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/NoMatchingMethodException.java (revision 0)
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec;
+
+import java.lang.reflect.Method;
+import java.util.List;
+
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+/**
+ * Exception thrown by the UDF and UDAF method resolvers in case no matching method
+ * is found.
+ *
+ */
+public class NoMatchingMethodException extends UDFArgumentException {
+
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * Constructor.
+   *
+   * @param funcClass
+   *          The UDF or UDAF class.
+   * @param argTypeInfos
+   *          The list of argument types that lead to an ambiguity.
+   * @param methods
+   *          All potential matches.
+   */
+  public NoMatchingMethodException(Class<?> funcClass,
+      List<TypeInfo> argTypeInfos, List<Method> methods) {
+    super("No matching method for " + funcClass + " with "
+        + argTypeInfos.toString().replace('[', '(').replace(']', ')'),
+        funcClass, argTypeInfos, methods);
+  }
+}
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDAFEvaluatorResolver.java (working copy)
@@ -41,6 +41,6 @@
   /**
    * Gets the evaluator class corresponding to the passed parameter list.
    */
   Class<? extends UDAFEvaluator> getEvaluatorClass(List<TypeInfo> argClasses)
-      throws AmbiguousMethodException;
+      throws UDFArgumentException;
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/UDFArgumentException.java (working copy)
@@ -18,7 +18,14 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.lang.reflect.Method;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 
 /**
  * exception class, thrown when udf argument have something wrong.
@@ -37,4 +44,76 @@
     super(cause);
   }
 
+  /**
+   * Constructor.
+   *
+   * @param funcClass
+   *          The UDF or UDAF class.
+   * @param argTypeInfos
+   *          The list of argument types that lead to an ambiguity.
+   * @param methods
+   *          All potential matches.
+   */
+  public UDFArgumentException(String message,
+      Class<?> funcClass,
+      List<TypeInfo> argTypeInfos,
+      List<Method> methods) {
+    super(getMessage(message, funcClass, argTypeInfos, methods));
+    this.funcClass = funcClass;
+    this.argTypeInfos = argTypeInfos;
+    this.methods = methods;
+  }
+
+  private static String getMessage(String message,
+      Class<?> funcClass,
+      List<TypeInfo> argTypeInfos,
+      List<Method> methods) {
+    StringBuilder sb = new StringBuilder();
+    sb.append(message);
+    if (methods != null) {
+      sb.append(". Possible choices: ");
+      for (Method m: methods) {
+        Type[] types = m.getGenericParameterTypes();
+        sb.append("_FUNC_(");
+        List<String> typeNames = new ArrayList<String>(types.length);
+        for (int t = 0; t < types.length; t++) {
+          if (t > 0) {
+            sb.append(", ");
+          }
+          sb.append(ObjectInspectorUtils.getTypeNameFromJavaClass(types[t]));
+        }
+        sb.append(") ");
+      }
+    }
+    return sb.toString();
+  }
+
+  /**
+   * The UDF or UDAF class that has the ambiguity.
+   */
+  private Class<?> funcClass;
+
+  /**
+   * The list of parameter types.
+   */
+  private List<TypeInfo> argTypeInfos;
+
+  /**
+   * The list of matched methods.
+   */
+  private List<Method> methods;
+
+  public Class<?> getFunctionClass() {
+    return funcClass;
+  }
+
+  public List<TypeInfo> getArgTypeList() {
+    return argTypeInfos;
+  }
+
+  public List<Method> getMethods() {
+    return methods;
+  }
+
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDFMethodResolver.java (working copy)
@@ -55,12 +55,8 @@
    * evaluate function signature.
    */
   @Override
-  public Method getEvalMethod(List<TypeInfo> argClasses) throws AmbiguousMethodException {
-    Method m = FunctionRegistry.getMethodInternal(udfClass, "evaluate", false,
+  public Method getEvalMethod(List<TypeInfo> argClasses) throws UDFArgumentException {
+    return FunctionRegistry.getMethodInternal(udfClass, "evaluate", false,
         argClasses);
-    if (m == null) {
-      throw new AmbiguousMethodException(udfClass, argClasses);
-    }
-    return m;
   }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/AmbiguousMethodException.java (working copy)
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.lang.reflect.Method;
 import java.util.List;
 
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -29,41 +30,22 @@
  */
 public class AmbiguousMethodException extends UDFArgumentException {
 
-  /**
-   *
-   */
   private static final long serialVersionUID = 1L;
 
   /**
-   * The UDF or UDAF class that has the ambiguity.
-   */
-  Class<?> funcClass;
-
-  /**
-   * The list of parameter types.
-   */
-  List<TypeInfo> argTypeInfos;
-
-  /**
    * Constructor.
    *
    * @param funcClass
    *          The UDF or UDAF class.
    * @param argTypeInfos
    *          The list of argument types that lead to an ambiguity.
+   * @param methods
+   *          All potential matches.
    */
   public AmbiguousMethodException(Class<?> funcClass,
-      List<TypeInfo> argTypeInfos) {
-    super("Ambiguous method for " + funcClass + " with " + argTypeInfos);
-    this.funcClass = funcClass;
-    this.argTypeInfos = argTypeInfos;
+      List<TypeInfo> argTypeInfos, List<Method> methods) {
+    super("Ambiguous method for " + funcClass + " with "
+        + argTypeInfos.toString().replace('[', '(').replace(']', ')'),
+        funcClass, argTypeInfos, methods);
   }
-
-  Class<?> getFunctionClass() {
-    return funcClass;
-  }
-
-  List<TypeInfo> getArgTypeList() {
-    return argTypeInfos;
-  }
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (working copy)
@@ -605,9 +605,11 @@
    * This method is shared between UDFRegistry and UDAFRegistry. methodName will
    * be "evaluate" for UDFRegistry, and "aggregate"/"evaluate"/"evaluatePartial"
    * for UDAFRegistry.
+   * @throws UDFArgumentException
    */
   public static Method getMethodInternal(Class<?> udfClass,
-      String methodName, boolean exact, List<TypeInfo> argumentClasses) {
+      String methodName, boolean exact, List<TypeInfo> argumentClasses)
+      throws UDFArgumentException {
 
     List<Method> mlist = new ArrayList<Method>();
@@ -617,7 +619,7 @@
       }
     }
 
-    return getMethodInternal(mlist, exact, argumentClasses);
+    return getMethodInternal(udfClass, mlist, exact, argumentClasses);
   }
 
   public static void registerTemporaryGenericUDAF(String functionName,
@@ -775,11 +777,14 @@
    *          The classes for the argument.
    * @return The matching method.
    */
-  public static Method getMethodInternal(List<Method> mlist, boolean exact,
-      List<TypeInfo> argumentsPassed) {
+  public static Method getMethodInternal(Class<?> udfClass, List<Method> mlist, boolean exact,
+      List<TypeInfo> argumentsPassed) throws UDFArgumentException {
+
+    // result
+    List<Method> udfMethods = new ArrayList<Method>();
+    // The cost of the result
     int leastConversionCost = Integer.MAX_VALUE;
-    Method udfMethod = null;
-
     for (Method m : mlist) {
       List<TypeInfo> argumentsAccepted = TypeInfoUtils.getParameterTypeInfos(m,
           argumentsPassed.size());
@@ -806,7 +811,8 @@
         if (match) {
           // Always choose the function with least implicit conversions.
           if (conversionCost < leastConversionCost) {
-            udfMethod = m;
+            udfMethods.clear();
+            udfMethods.add(m);
             leastConversionCost = conversionCost;
             // Found an exact match
             if (leastConversionCost == 0) {
@@ -815,16 +821,23 @@
           } else if (conversionCost == leastConversionCost) {
             // Ambiguous call: two methods with the same number of implicit
             // conversions
-            LOG.info("Ambigious methods: passed = " + argumentsPassed
-                + " method 1 = " + udfMethod + " method 2 = " + m);
-            udfMethod = null;
+            udfMethods.add(m);
             // Don't break! We might find a better match later.
           } else {
             // do nothing if implicitConversions > leastImplicitConversions
           }
         }
       }
-    return udfMethod;
+
+    if (udfMethods.size() == 0) {
+      // No matching methods found
+      throw new NoMatchingMethodException(udfClass, argumentsPassed, mlist);
+    }
+    if (udfMethods.size() > 1) {
+      // Ambiguous method found
+      throw new AmbiguousMethodException(udfClass, argumentsPassed, mlist);
+    }
+    return udfMethods.get(0);
   }
 
   /**
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/NumericOpMethodResolver.java (working copy)
@@ -121,7 +121,8 @@
       if (match) {
         if (udfMethod != null) {
-          throw new AmbiguousMethodException(udfClass, argTypeInfos);
+          throw new AmbiguousMethodException(udfClass, argTypeInfos,
+              Arrays.asList(new Method[]{udfMethod, m}));
         } else {
           udfMethod = m;
         }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ComparisonOpMethodResolver.java (working copy)
@@ -96,7 +96,8 @@
       if (match) {
         if (udfMethod != null) {
-          throw new AmbiguousMethodException(udfClass, argTypeInfos);
+          throw new AmbiguousMethodException(udfClass, argTypeInfos,
+              Arrays.asList(new Method[]{udfMethod, m}));
         } else {
           udfMethod = m;
         }
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAFEvaluatorResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAFEvaluatorResolver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/NumericUDAFEvaluatorResolver.java (working copy)
@@ -47,7 +47,7 @@
    */
   @Override
   public Class<? extends UDAFEvaluator> getEvaluatorClass(
-      List<TypeInfo> argTypeInfos) throws AmbiguousMethodException {
+      List<TypeInfo> argTypeInfos) throws UDFArgumentException {
     // Go through the argClasses and for any string, void or date time, start
     // looking for doubles
     ArrayList<TypeInfo> args = new ArrayList<TypeInfo>();
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultUDAFEvaluatorResolver.java (working copy)
@@ -20,6 +20,7 @@
 
 import java.lang.reflect.Method;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -55,7 +56,7 @@
    *          The list of the parameter types.
    */
   public Class<? extends UDAFEvaluator> getEvaluatorClass(
-      List<TypeInfo> argClasses) throws AmbiguousMethodException {
+      List<TypeInfo> argClasses) throws UDFArgumentException {
 
     ArrayList<Class<? extends UDAFEvaluator>> classList =
         new ArrayList<Class<? extends UDAFEvaluator>>();
@@ -80,10 +81,7 @@
       }
     }
 
-    Method m = FunctionRegistry.getMethodInternal(mList, false, argClasses);
-    if (m == null) {
-      throw new AmbiguousMethodException(udafClass, argClasses);
-    }
+    Method m = FunctionRegistry.getMethodInternal(udafClass, mList, false, argClasses);
 
     // Find the class that has this method.
     // Note that Method.getDeclaringClass() may not work here because the method
@@ -94,7 +92,7 @@
         if (found == -1) {
           found = i;
         } else {
-          throw new AmbiguousMethodException(udafClass, argClasses);
+          throw new AmbiguousMethodException(udafClass, null, null);
         }
       }
     }
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (working copy)
@@ -24,12 +24,10 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.BucketMatcher;
+import org.apache.hadoop.hive.ql.exec.Operator;
 
 /**
  * MapredLocalWork.
Index: ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (working copy)
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -455,7 +456,7 @@
   public static ReduceSinkDesc getReduceSinkDesc(
       ArrayList<ExprNodeDesc> keyCols, ArrayList<ExprNodeDesc> valueCols,
       List<String> outputColumnNames, boolean includeKey, int tag,
-      int numPartitionFields, int numReducers) {
+      int numPartitionFields, int numReducers) throws SemanticException {
     ArrayList<ExprNodeDesc> partitionCols = null;
 
     if (numPartitionFields >= keyCols.size()) {
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy)
@@ -454,25 +454,6 @@
     }
 
     /**
-     * Get the exprNodeDesc.
-     *
-     * @param name
-     * @param children
-     * @return The expression node descriptor
-     * @throws UDFArgumentException
-     */
-    public static ExprNodeDesc getFuncExprNodeDesc(String name,
-        ExprNodeDesc... children) {
-      ArrayList<ExprNodeDesc> c = new ArrayList<ExprNodeDesc>(Arrays
-          .asList(children));
-      try {
-        return getFuncExprNodeDesc(name, c);
-      } catch (UDFArgumentException e) {
-        throw new RuntimeException("Hive 2 internal error", e);
-      }
-    }
-
-    /**
      * This function create an ExprNodeDesc for a UDF function given the
      * children (arguments). It will insert implicit type conversion functions
      * if necessary.
@@ -480,20 +461,22 @@
     * @throws UDFArgumentException
     */
    public static ExprNodeDesc getFuncExprNodeDesc(String udfName,
-        List<ExprNodeDesc> children) throws UDFArgumentException {
+        ExprNodeDesc... children) throws UDFArgumentException {
 
      FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName);
      if (fi == null) {
-        throw new UDFArgumentException("udf:" + udfName + " not found.");
+        throw new UDFArgumentException(udfName + " not found.");
      }
 
      GenericUDF genericUDF = fi.getGenericUDF();
      if (genericUDF == null) {
-        throw new UDFArgumentException("udf:" + udfName
-            + " is an aggregation function.");
+        throw new UDFArgumentException(udfName
+            + " is an aggregation function or a table function.");
      }
-      return ExprNodeGenericFuncDesc.newInstance(genericUDF, children);
+
+      List<ExprNodeDesc> childrenList = new ArrayList<ExprNodeDesc>(children.length);
+      childrenList.addAll(Arrays.asList(children));
+      return ExprNodeGenericFuncDesc.newInstance(genericUDF, childrenList);
    }
 
    static ExprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr,
@@ -546,7 +529,7 @@
       TypeInfo myt = children.get(0).getTypeInfo();
 
       if (myt.getCategory() == Category.LIST) {
-        // Only allow constant integer index for now
+        // Only allow integer index for now
        if (!(children.get(1) instanceof ExprNodeConstantDesc)
            || !(((ExprNodeConstantDesc) children.get(1)).getTypeInfo()
            .equals(TypeInfoFactory.intTypeInfo))) {
@@ -559,7 +542,7 @@
        desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
            .getGenericUDFForIndex(), children);
      } else if (myt.getCategory() == Category.MAP) {
-        // Only allow only constant indexes for now
+        // Only allow constant map key for now
        if (!(children.get(1) instanceof ExprNodeConstantDesc)) {
          throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT
              .getMsg(expr));
@@ -601,28 +584,7 @@
          throw new SemanticException(ErrorMsg.UDTF_INVALID_LOCATION.getMsg());
        }
 
-        try {
-          desc = getFuncExprNodeDesc(funcText, children);
-        } catch (AmbiguousMethodException e) {
-          ArrayList<Class<?>> argumentClasses = new ArrayList<Class<?>>(
-              children.size());
-          for (int i = 0; i < children.size(); i++) {
-            argumentClasses.add(((PrimitiveTypeInfo) children.get(i)
-                .getTypeInfo()).getPrimitiveWritableClass());
-          }
-
-          if (isFunction) {
-            String reason = "Looking for UDF \"" + expr.getChild(0).getText()
-                + "\" with parameters " + argumentClasses;
-            throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE
-                .getMsg((ASTNode) expr.getChild(0), reason), e);
-          } else {
-            String reason = "Looking for Operator \"" + expr.getText()
-                + "\" with parameters " + argumentClasses;
-            throw new SemanticException(ErrorMsg.INVALID_OPERATOR_SIGNATURE
-                .getMsg(expr, reason), e);
-          }
-        }
+        desc = ExprNodeGenericFuncDesc.newInstance(fi.getGenericUDF(), children);
      }
 
      // UDFOPPositive is a no-op.
      // However, we still create it, and then remove it here, to make sure we
Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy)
@@ -3408,7 +3408,8 @@
           column = null;
         } else {
           column = TypeCheckProcFactory.DefaultExprProcessor
-              .getFuncExprNodeDesc(tableFieldTypeInfo.getTypeName(), column);
+              .getFuncExprNodeDesc(tableFieldTypeInfo.getTypeName(),
+              column);
         }
         if (column == null) {
           String reason = "Cannot convert column " + i + " from "
@@ -3626,7 +3627,8 @@
           column = null;
         } else {
           column = TypeCheckProcFactory.DefaultExprProcessor
-              .getFuncExprNodeDesc(tableFieldTypeInfo.getTypeName(), column);
+              .getFuncExprNodeDesc(tableFieldTypeInfo.getTypeName(),
+              column);
         }
         if (column == null) {
           String reason = "Cannot convert column " + posn + " from "
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java (revision 919266)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java (working copy)
@@ -355,9 +355,9 @@
             + org.apache.hadoop.util.StringUtils.stringifyException(e));
         return (11);
       } catch (Exception e) {
-        errorMessage = "FAILED: Unknown exception: " + e.getMessage();
+        errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
         SQLState = ErrorMsg.findSQLState(e.getMessage());
-        console.printError(errorMessage, "\n"
+        console.printError(errorMessage + "\n"
             + org.apache.hadoop.util.StringUtils.stringifyException(e));
         return (12);
       }
@@ -555,9 +555,9 @@
             Keys.QUERY_RET_CODE, String.valueOf(12));
       }
       // TODO: do better with handling types of Exception here
-      errorMessage = "FAILED: Unknown exception : " + e.getMessage();
+      errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
       SQLState = "08S01";
-      console.printError(errorMessage, "\n"
+      console.printError(errorMessage + "\n"
          + org.apache.hadoop.util.StringUtils.stringifyException(e));
      return (12);
    } finally {
@@ -732,7 +732,7 @@
    try {
      ctx.clear();
    } catch (Exception e) {
-      console.printError("FAILED: Unknown exception : " + e.getMessage(), "\n"
+      console.printError("FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) + "\n"
          + org.apache.hadoop.util.StringUtils.stringifyException(e));
      return 13;
    }
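
Reviewer note (not part of the patch): the FunctionRegistry change above replaces the single udfMethod holder with a list of least-cost candidates, so that zero survivors raise NoMatchingMethodException and ties raise AmbiguousMethodException instead of silently returning null. The class below is a self-contained, simplified model of that selection loop; the conversionCost table and string type names are illustrative stand-ins, not Hive's TypeInfo machinery.

// MethodResolutionSketch.java -- illustrative only; not Hive code.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MethodResolutionSketch {

  // Cost of implicitly converting 'from' to 'to'; -1 means not convertible.
  // The widening chain here is a stand-in for Hive's implicit-conversion rules.
  static int conversionCost(String from, String to) {
    if (from.equals(to)) {
      return 0;
    }
    List<String> widening =
        Arrays.asList("tinyint", "smallint", "int", "bigint", "double");
    int f = widening.indexOf(from);
    int t = widening.indexOf(to);
    return (f >= 0 && t > f) ? t - f : -1;
  }

  // Mirrors the patched getMethodInternal: keep every candidate tied for the
  // least total conversion cost, then fail if none or more than one survives.
  static List<String> resolve(List<List<String>> candidates, List<String> passed) {
    List<List<String>> best = new ArrayList<List<String>>();
    int leastCost = Integer.MAX_VALUE;
    for (List<String> sig : candidates) {
      if (sig.size() != passed.size()) {
        continue;
      }
      int cost = 0;
      boolean match = true;
      for (int i = 0; i < sig.size() && match; i++) {
        int c = conversionCost(passed.get(i), sig.get(i));
        if (c < 0) {
          match = false;
        } else {
          cost += c;
        }
      }
      if (!match) {
        continue;
      }
      if (cost < leastCost) {
        best.clear();          // strictly better: restart the candidate set
        best.add(sig);
        leastCost = cost;
      } else if (cost == leastCost) {
        best.add(sig);         // tie: don't break, a later method may still win
      }
    }
    if (best.isEmpty()) {
      throw new IllegalArgumentException("No matching method for " + passed);
    }
    if (best.size() > 1) {
      throw new IllegalArgumentException("Ambiguous method for " + passed);
    }
    return best.get(0);
  }

  public static void main(String[] args) {
    // Same shape as the UDAFPercentile case in no_matching_udf.q.out.
    List<List<String>> percentile = Arrays.asList(
        Arrays.asList("bigint", "double"),
        Arrays.asList("bigint", "array<double>"));
    // (int, double) widens to (bigint, double) at cost 1: resolves uniquely.
    System.out.println(resolve(percentile, Arrays.asList("int", "double")));
    try {
      // double never narrows to bigint, so no candidate matches.
      resolve(percentile, Arrays.asList("double", "double"));
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}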
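
Reviewer note (not part of the patch): the new UDFArgumentException.getMessage helper is what produces the "Possible choices: _FUNC_(...)" suffix seen in the updated .q.out files, via getTypeNameFromJavaClass on each reflected parameter type. A minimal sketch of that formatting, with plain strings standing in for the reflected types:

// PossibleChoicesSketch.java -- illustrative only; not Hive code.
import java.util.Arrays;
import java.util.List;

public class PossibleChoicesSketch {

  // Builds "<message>. Possible choices: _FUNC_(a, b) _FUNC_(c) " the same way
  // the patched getMessage does, one _FUNC_ entry per candidate signature.
  static String withChoices(String message, List<List<String>> signatures) {
    StringBuilder sb = new StringBuilder(message);
    sb.append(". Possible choices: ");
    for (List<String> sig : signatures) {
      sb.append("_FUNC_(");
      for (int i = 0; i < sig.size(); i++) {
        if (i > 0) {
          sb.append(", ");
        }
        sb.append(sig.get(i));
      }
      sb.append(") ");
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // Reproduces the shape of the no_matching_udf.q.out message.
    System.out.println(withChoices(
        "No matching method for class UDAFPercentile with (double, double)",
        Arrays.asList(Arrays.asList("bigint", "array<double>"),
            Arrays.asList("bigint", "double"))));
  }
}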