Index: jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java =================================================================== --- jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (revision 1336457) +++ jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java (working copy) @@ -496,7 +496,6 @@ public void testErrorMessages() throws SQLException { String invalidSyntaxSQLState = "42000"; - int parseErrorCode = 10; // These tests inherently cause exceptions to be written to the test output // logs. This is undesirable, since you it might appear to someone looking @@ -504,27 +503,23 @@ // sure // how to get around that. doTestErrorCase("SELECTT * FROM " + tableName, - "cannot recognize input near 'SELECTT' '*' 'FROM'", invalidSyntaxSQLState, 11); + "cannot recognize input near 'SELECTT' '*' 'FROM'", + invalidSyntaxSQLState, 40000); doTestErrorCase("SELECT * FROM some_table_that_does_not_exist", - "Table not found", "42000", parseErrorCode); + "Table not found", "42S02", 10001); doTestErrorCase("drop table some_table_that_does_not_exist", - "Table not found", "42000", parseErrorCode); + "Table not found", "42S02", 10001); doTestErrorCase("SELECT invalid_column FROM " + tableName, - "Invalid table alias or column reference", invalidSyntaxSQLState, - parseErrorCode); + "Invalid table alias or column reference", invalidSyntaxSQLState, 10004); doTestErrorCase("SELECT invalid_function(under_col) FROM " + tableName, - "Invalid function", invalidSyntaxSQLState, parseErrorCode); + "Invalid function", invalidSyntaxSQLState, 10011); - // TODO: execute errors like this currently don't return good messages (i.e. - // 'Table already exists'). This is because the Driver class calls - // Task.executeTask() which swallows meaningful exceptions and returns a - // status - // code. This should be refactored. + // TODO: execute errors like this currently don't return good error + // codes and messages. This should be fixed. 
doTestErrorCase( "create table " + tableName + " (key int, value string)", - "Query returned non-zero code: 9, cause: FAILED: Execution Error, " - + "return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask", - "08S01", 9); + "FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask", + "08S01", 1); } private void doTestErrorCase(String sql, String expectedMessage, Index: contrib/src/test/results/clientnegative/udtf_explode2.q.out =================================================================== --- contrib/src/test/results/clientnegative/udtf_explode2.q.out (revision 1336457) +++ contrib/src/test/results/clientnegative/udtf_explode2.q.out (working copy) @@ -2,4 +2,4 @@ PREHOOK: type: CREATEFUNCTION POSTHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2' POSTHOOK: type: CREATEFUNCTION -FAILED: Error in semantic analysis: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 2 aliases but got 1 +FAILED: SemanticException [Error 10083]: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 2 aliases but got 1 Index: contrib/src/test/results/clientnegative/invalid_row_sequence.q.out =================================================================== --- contrib/src/test/results/clientnegative/invalid_row_sequence.q.out (revision 1336457) +++ contrib/src/test/results/clientnegative/invalid_row_sequence.q.out (working copy) @@ -12,4 +12,4 @@ POSTHOOK: query: create temporary function row_sequence as 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence' POSTHOOK: type: CREATEFUNCTION -FAILED: Error in semantic analysis: Stateful UDF's can only be invoked in the SELECT list +FAILED: SemanticException [Error 10084]: Stateful UDF's can only be invoked in the SELECT list Index: ql/src/test/results/clientpositive/mapjoin_hook.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_hook.q.out (revision 1336457) +++ ql/src/test/results/clientpositive/mapjoin_hook.q.out (working copy) @@ -25,7 +25,7 @@ PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 PREHOOK: Output: default@dest1 -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! @@ -35,7 +35,7 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask [MapJoinCounter PostHook] CONVERTED_LOCAL_MAPJOIN: 1 CONVERTED_MAPJOIN: 0 LOCAL_MAPJOIN: 0 COMMON_JOIN: 0 BACKUP_COMMON_JOIN: 1 PREHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key) @@ -43,7 +43,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@dest1 -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! 
@@ -53,9 +53,9 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! @@ -65,6 +65,6 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask [MapJoinCounter PostHook] CONVERTED_LOCAL_MAPJOIN: 2 CONVERTED_MAPJOIN: 0 LOCAL_MAPJOIN: 0 COMMON_JOIN: 0 BACKUP_COMMON_JOIN: 2 Index: ql/src/test/results/clientpositive/auto_join25.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join25.q.out (revision 1336457) +++ ql/src/test/results/clientpositive/auto_join25.q.out (working copy) @@ -13,7 +13,7 @@ PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12 PREHOOK: Output: default@dest1 -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! @@ -23,7 +23,7 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask POSTHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest1 SELECT src1.key, src2.value @@ -60,7 +60,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@dest_j2 -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! @@ -70,9 +70,9 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! @@ -82,7 +82,7 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) JOIN src src3 ON (src1.key + src2.key = src3.key) INSERT OVERWRITE TABLE dest_j2 SELECT src1.key, src3.value @@ -120,7 +120,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Output: default@dest_j1 -Execution failed with exit status: 2 +Execution failed with exit status: 3 Obtaining error information Task failed! 
@@ -130,7 +130,7 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapredLocalTask +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.MapredLocalTask ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.MapRedTask POSTHOOK: query: FROM src src1 JOIN src src2 ON (src1.key = src2.key) INSERT OVERWRITE TABLE dest_j1 SELECT src1.key, src2.value Index: ql/src/test/results/clientnegative/protectmode_tbl4.q.out =================================================================== --- ql/src/test/results/clientnegative/protectmode_tbl4.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/protectmode_tbl4.q.out (working copy) @@ -59,4 +59,4 @@ p string #### A masked pattern was here #### -FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_tbl4 +FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode_tbl4 Index: ql/src/test/results/clientnegative/protectmode_part.q.out =================================================================== --- ql/src/test/results/clientnegative/protectmode_part.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/protectmode_part.q.out (working copy) @@ -58,4 +58,4 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@tbl_protectmode3@p=p2 #### A masked pattern was here #### -FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode3 Partition p=p1 +FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode3 Partition p=p1 Index: ql/src/test/results/clientnegative/invalid_t_create1.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_t_create1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_t_create1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead +FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead Index: ql/src/test/results/clientnegative/duplicate_alias_in_transform_schema.q.out =================================================================== --- ql/src/test/results/clientnegative/duplicate_alias_in_transform_schema.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/duplicate_alias_in_transform_schema.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Column alias already exists: foo +FAILED: SemanticException [Error 10074]: Column alias already exists: foo Index: ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_12_nonnative_export.q.out (working copy) @@ -9,4 +9,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@exim_department -FAILED: Error in semantic analysis: Export/Import cannot be done for a non-native table. +FAILED: SemanticException [Error 10121]: Export/Import cannot be done for a non-native table. 
Index: ql/src/test/results/clientnegative/part_col_complex_type.q.out =================================================================== --- ql/src/test/results/clientnegative/part_col_complex_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/part_col_complex_type.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Partition column must be of primitive type. Found b of type: map +FAILED: SemanticException [Error 10126]: Partition column must be of primitive type. Found b of type: map Index: ql/src/test/results/clientnegative/create_view_failure7.q.out =================================================================== --- ql/src/test/results/clientnegative/create_view_failure7.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_view_failure7.q.out (working copy) @@ -2,4 +2,4 @@ PREHOOK: type: DROPVIEW POSTHOOK: query: DROP VIEW xxx16 POSTHOOK: type: DROPVIEW -FAILED: Error in semantic analysis: At least one non-partitioning column must be present in view +FAILED: SemanticException [Error 10092]: At least one non-partitioning column must be present in view Index: ql/src/test/results/clientnegative/alter_view_failure6.q.out =================================================================== --- ql/src/test/results/clientnegative/alter_view_failure6.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/alter_view_failure6.q.out (working copy) @@ -15,5 +15,5 @@ POSTHOOK: type: CREATEVIEW POSTHOOK: Output: default@xxx7 #### A masked pattern was here #### -FAILED: Error in semantic analysis: No partition predicate found for Alias "xxx7:srcpart" Table "srcpart" -FAILED: Error in semantic analysis: The query does not reference any valid partition. To run this query, set hive.mapred.mode=nonstrict +FAILED: SemanticException [Error 10041]: No partition predicate found for Alias "xxx7:srcpart" Table "srcpart" +FAILED: SemanticException [Error 10056]: The query does not reference any valid partition. To run this query, set hive.mapred.mode=nonstrict Index: ql/src/test/results/clientnegative/merge_negative_2.q.out =================================================================== --- ql/src/test/results/clientnegative/merge_negative_2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/merge_negative_2.q.out (working copy) @@ -13,4 +13,4 @@ POSTHOOK: Output: default@srcpart2@ds=2011 POSTHOOK: Lineage: srcpart2 PARTITION(ds=2011).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart2 PARTITION(ds=2011).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: source table srcpart2 is partitioned but no partition desc found. +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: source table srcpart2 is partitioned but no partition desc found. Index: ql/src/test/results/clientnegative/compare_string_bigint.q.out =================================================================== --- ql/src/test/results/clientnegative/compare_string_bigint.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/compare_string_bigint.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments ''1'': In strict mode, comparing bigints and strings is not allowed, it may result in a loss of precision. 
If you really want to perform the operation, set hive.mapred.mode=nonstrict +FAILED: SemanticException Line 0:-1 Wrong arguments ''1'': In strict mode, comparing bigints and strings is not allowed, it may result in a loss of precision. If you really want to perform the operation, set hive.mapred.mode=nonstrict Index: ql/src/test/results/clientnegative/duplicate_insert1.q.out =================================================================== --- ql/src/test/results/clientnegative/duplicate_insert1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/duplicate_insert1.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table dest1_din1(key int, value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1_din1 -FAILED: Error in semantic analysis: The same output cannot be present multiple times: dest1_din1 +FAILED: SemanticException [Error 10087]: The same output cannot be present multiple times: dest1_din1 Index: ql/src/test/results/clientnegative/clustern3.q.out =================================================================== --- ql/src/test/results/clientnegative/clustern3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/clustern3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:52 Invalid column reference 'key' +FAILED: SemanticException [Error 10002]: Line 2:52 Invalid column reference 'key' Index: ql/src/test/results/clientnegative/invalid_cast_to_binary_1.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_to_binary_1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_to_binary_1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. +FAILED: SemanticException Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. 
Index: ql/src/test/results/clientnegative/regex_col_1.q.out =================================================================== --- ql/src/test/results/clientnegative/regex_col_1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/regex_col_1.q.out (working copy) @@ -1,3 +1,3 @@ -FAILED: Error in semantic analysis: Line 2:7 Invalid column reference '`+++`': Dangling meta character '+' near index 0 +FAILED: SemanticException Line 2:7 Invalid column reference '`+++`': Dangling meta character '+' near index 0 +++ ^ Index: ql/src/test/results/clientnegative/joinneg.q.out =================================================================== --- ql/src/test/results/clientnegative/joinneg.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/joinneg.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 5:12 Invalid table alias 'b' +FAILED: SemanticException [Error 10009]: Line 5:12 Invalid table alias 'b' Index: ql/src/test/results/clientnegative/udf_elt_wrong_type.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_elt_wrong_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_elt_wrong_type.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:14 Argument type mismatch 'lintstring': The 2nd argument of function ELT is expected to a primitive type, but list is found +FAILED: SemanticException [Error 10016]: Line 2:14 Argument type mismatch 'lintstring': The 2nd argument of function ELT is expected to a primitive type, but list is found Index: ql/src/test/results/clientnegative/input41.q.out =================================================================== --- ql/src/test/results/clientnegative/input41.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/input41.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Schema of both sides of union should match. +FAILED: SemanticException Schema of both sides of union should match. Index: ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out =================================================================== --- ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/drop_partition_filter_failure.q.out (working copy) @@ -15,4 +15,4 @@ POSTHOOK: query: show partitions ptestfilter1 POSTHOOK: type: SHOWPARTITIONS c=US/d=1 -FAILED: Error in semantic analysis: Partition not found c = 'US' AND d < 1 +FAILED: SemanticException [Error 10006]: Partition not found c = 'US' AND d < 1 Index: ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out =================================================================== --- ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/mapreduce_stack_trace_turnoff.q.out (working copy) @@ -2,4 +2,4 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script. 
Index: ql/src/test/results/clientnegative/no_matching_udf.q.out =================================================================== --- ql/src/test/results/clientnegative/no_matching_udf.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/no_matching_udf.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: No matching method for class org.apache.hadoop.hive.ql.udf.UDAFPercentile with (double, double). Possible choices: _FUNC_(bigint, array) _FUNC_(bigint, double) +FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDAFPercentile with (double, double). Possible choices: _FUNC_(bigint, array) _FUNC_(bigint, double) Index: ql/src/test/results/clientnegative/invalid_stddev_samp_syntax.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_stddev_samp_syntax.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_stddev_samp_syntax.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid. +FAILED: SemanticException The specified syntax for UDAF invocation is invalid. Index: ql/src/test/results/clientnegative/archive_insert3.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_insert3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_insert3.q.out (working copy) @@ -31,4 +31,4 @@ POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: Insert conflict with existing archive: ds=2008-04-08 +FAILED: SemanticException Insert conflict with existing archive: ds=2008-04-08 Index: ql/src/test/results/clientnegative/load_non_native.q.out =================================================================== --- ql/src/test/results/clientnegative/load_non_native.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/load_non_native.q.out (working copy) @@ -5,4 +5,4 @@ STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@non_native2 -FAILED: Error in semantic analysis: A non-native table cannot be used as target for LOAD +FAILED: SemanticException [Error 10101]: A non-native table cannot be used as target for LOAD Index: ql/src/test/results/clientnegative/duplicate_alias_in_transform.q.out =================================================================== --- ql/src/test/results/clientnegative/duplicate_alias_in_transform.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/duplicate_alias_in_transform.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Column alias already exists: foo +FAILED: SemanticException [Error 10074]: Column alias already exists: foo Index: ql/src/test/results/clientnegative/archive4.q.out =================================================================== --- ql/src/test/results/clientnegative/archive4.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive4.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: ARCHIVE can only be run on a single partition +FAILED: SemanticException [Error 10109]: ARCHIVE can only be 
run on a single partition Index: ql/src/test/results/clientnegative/udf_array_contains_wrong1.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_array_contains_wrong1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_array_contains_wrong1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:22 Argument type mismatch '1': "array" expected at function ARRAY_CONTAINS, but "int" is found +FAILED: SemanticException [Error 10016]: Line 2:22 Argument type mismatch '1': "array" expected at function ARRAY_CONTAINS, but "int" is found Index: ql/src/test/results/clientnegative/column_rename3.q.out =================================================================== --- ql/src/test/results/clientnegative/column_rename3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/column_rename3.q.out (working copy) @@ -1,2 +1,2 @@ -FAILED: Parse Error: line 1:27 cannot recognize input near '' '' '' in column type +FAILED: ParseException line 1:27 cannot recognize input near '' '' '' in column type Index: ql/src/test/results/clientnegative/default_partition_name.q.out =================================================================== --- ql/src/test/results/clientnegative/default_partition_name.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/default_partition_name.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table default_partition_name (key int, value string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@default_partition_name -FAILED: Error in semantic analysis: Partition value contains a reserved substring (User value: __HIVE_DEFAULT_PARTITION__ Reserved substring: __HIVE_DEFAULT_PARTITION__) +FAILED: SemanticException [Error 10111]: Partition value contains a reserved substring (User value: __HIVE_DEFAULT_PARTITION__ Reserved substring: __HIVE_DEFAULT_PARTITION__) Index: ql/src/test/results/clientnegative/clusterbysortby.q.out =================================================================== --- ql/src/test/results/clientnegative/clusterbysortby.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/clusterbysortby.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: 8:8 Cannot have both CLUSTER BY and SORT BY clauses. Error encountered near token 'one' +FAILED: SemanticException 8:8 Cannot have both CLUSTER BY and SORT BY clauses. 
Error encountered near token 'one' Index: ql/src/test/results/clientnegative/analyze1.q.out =================================================================== --- ql/src/test/results/clientnegative/analyze1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/analyze1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Non-Partition column appears in the partition specification: key +FAILED: SemanticException [Error 10098]: Non-Partition column appears in the partition specification: key Index: ql/src/test/results/clientnegative/protectmode_part1.q.out =================================================================== --- ql/src/test/results/clientnegative/protectmode_part1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/protectmode_part1.q.out (working copy) @@ -77,4 +77,4 @@ POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ] POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ] POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ] -FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode5 Partition p=p1 +FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode5 Partition p=p1 Index: ql/src/test/results/clientnegative/udf_printf_wrong1.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_printf_wrong1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_printf_wrong1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:7 Arguments length mismatch 'printf': The function PRINTF(String format, Obj... args) needs at least one arguments. +FAILED: SemanticException [Error 10015]: Line 2:7 Arguments length mismatch 'printf': The function PRINTF(String format, Obj... args) needs at least one arguments. Index: ql/src/test/results/clientnegative/archive_partspec2.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_partspec2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_partspec2.q.out (working copy) @@ -21,4 +21,4 @@ POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (hr). +FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (hr). 
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table tbl (a binary) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@tbl -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) +FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) Index: ql/src/test/results/clientnegative/input1.q.out =================================================================== --- ql/src/test/results/clientnegative/input1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/input1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:7 Invalid table alias 'a' +FAILED: SemanticException [Error 10009]: Line 1:7 Invalid table alias 'a' Index: ql/src/test/results/clientnegative/load_view_failure.q.out =================================================================== --- ql/src/test/results/clientnegative/load_view_failure.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/load_view_failure.q.out (working copy) @@ -9,4 +9,4 @@ POSTHOOK: type: CREATEVIEW POSTHOOK: Output: default@xxx11 #### A masked pattern was here #### -FAILED: Error in semantic analysis: A view cannot be used as target table for LOAD or INSERT +FAILED: SemanticException [Error 10090]: A view cannot be used as target table for LOAD or INSERT Index: ql/src/test/results/clientnegative/create_table_failure2.q.out =================================================================== --- ql/src/test/results/clientnegative/create_table_failure2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_table_failure2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: Database does not exist: table_in_database_creation_not_exist +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Database does not exist: table_in_database_creation_not_exist Index: ql/src/test/results/clientnegative/groupby3_map_skew_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientnegative/groupby3_map_skew_multi_distinct.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/groupby3_map_skew_multi_distinct.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE, c10 DOUBLE, c11 DOUBLE) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: DISTINCT on different columns not supported with skew in data +FAILED: SemanticException [Error 10022]: DISTINCT on different columns not supported with skew in data Index: ql/src/test/results/clientnegative/create_or_replace_view4.q.out 
=================================================================== --- ql/src/test/results/clientnegative/create_or_replace_view4.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_or_replace_view4.q.out (working copy) @@ -13,4 +13,4 @@ POSTHOOK: type: CREATEVIEW POSTHOOK: Output: default@v #### A masked pattern was here #### -FAILED: Error in semantic analysis: At least one non-partitioning column must be present in view +FAILED: SemanticException [Error 10092]: At least one non-partitioning column must be present in view Index: ql/src/test/results/clientnegative/drop_function_failure.q.out =================================================================== --- ql/src/test/results/clientnegative/drop_function_failure.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/drop_function_failure.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Invalid function UnknownFunction +FAILED: SemanticException [Error 10011]: Invalid function UnknownFunction Index: ql/src/test/results/clientnegative/udf_min.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_min.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_min.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Cannot support comparison of map<> type or complex type containing map<>. +FAILED: UDFArgumentTypeException Cannot support comparison of map<> type or complex type containing map<>. Index: ql/src/test/results/clientnegative/strict_join.q.out =================================================================== --- ql/src/test/results/clientnegative/strict_join.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/strict_join.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict +FAILED: SemanticException [Error 10052]: In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict Index: ql/src/test/results/clientnegative/ambiguous_col.q.out =================================================================== --- ql/src/test/results/clientnegative/ambiguous_col.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/ambiguous_col.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Ambiguous column reference key +FAILED: SemanticException [Error 10007]: Ambiguous column reference key Index: ql/src/test/results/clientnegative/invalid_avg_syntax.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_avg_syntax.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_avg_syntax.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid. +FAILED: SemanticException The specified syntax for UDAF invocation is invalid. Index: ql/src/test/results/clientnegative/invalid_create_tbl1.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead +FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. 
Please use TIMESTAMP instead Index: ql/src/test/results/clientnegative/udf_field_wrong_type.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_field_wrong_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_field_wrong_type.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:16 Argument type mismatch 'lintstring': The 2nd argument of function FIELD is expected to a primitive type, but list is found +FAILED: SemanticException [Error 10016]: Line 2:16 Argument type mismatch 'lintstring': The 2nd argument of function FIELD is expected to a primitive type, but list is found Index: ql/src/test/results/clientnegative/semijoin3.q.out =================================================================== --- ql/src/test/results/clientnegative/semijoin3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/semijoin3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:67 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1) +FAILED: SemanticException [Error 10004]: Line 2:67 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1) Index: ql/src/test/results/clientnegative/protectmode_tbl5.q.out =================================================================== --- ql/src/test/results/clientnegative/protectmode_tbl5.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/protectmode_tbl5.q.out (working copy) @@ -59,4 +59,4 @@ p string #### A masked pattern was here #### -FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_tbl5 +FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode_tbl5 Index: ql/src/test/results/clientnegative/udf_coalesce.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_coalesce.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_coalesce.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:33 Argument type mismatch ''2.0'': The expressions after COALESCE should all have the same type: "array" is expected but "string" is found +FAILED: SemanticException [Error 10016]: Line 1:33 Argument type mismatch ''2.0'': The expressions after COALESCE should all have the same type: "array" is expected but "string" is found Index: ql/src/test/results/clientnegative/invalid_t_create2.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_t_create2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_t_create2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead +FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. 
Please use TIMESTAMP instead Index: ql/src/test/results/clientnegative/genericFileFormat.q.out =================================================================== --- ql/src/test/results/clientnegative/genericFileFormat.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/genericFileFormat.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Unrecognized file format in STORED AS clause: foo +FAILED: SemanticException Unrecognized file format in STORED AS clause: foo Index: ql/src/test/results/clientnegative/create_view_failure8.q.out =================================================================== --- ql/src/test/results/clientnegative/create_view_failure8.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_view_failure8.q.out (working copy) @@ -2,4 +2,4 @@ PREHOOK: type: DROPVIEW POSTHOOK: query: DROP VIEW xxx17 POSTHOOK: type: DROPVIEW -FAILED: Error in semantic analysis: Rightmost columns in view output do not match PARTITIONED ON clause +FAILED: SemanticException [Error 10093]: Rightmost columns in view output do not match PARTITIONED ON clause Index: ql/src/test/results/clientnegative/alter_view_failure7.q.out =================================================================== --- ql/src/test/results/clientnegative/alter_view_failure7.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/alter_view_failure7.q.out (working copy) @@ -15,4 +15,4 @@ POSTHOOK: type: CREATEVIEW POSTHOOK: Output: default@xxx8 #### A masked pattern was here #### -FAILED: Error in semantic analysis: table is partitioned but partition spec is not specified or does not fully match table partitioning: {ds=2011-01-01} +FAILED: SemanticException table is partitioned but partition spec is not specified or does not fully match table partitioning: {ds=2011-01-01} Index: ql/src/test/results/clientnegative/groupby2_map_skew_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientnegative/groupby2_map_skew_multi_distinct.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/groupby2_map_skew_multi_distinct.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: DISTINCT on different columns not supported with skew in data +FAILED: SemanticException [Error 10022]: DISTINCT on different columns not supported with skew in data Index: ql/src/test/results/clientnegative/duplicate_insert2.q.out =================================================================== --- ql/src/test/results/clientnegative/duplicate_insert2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/duplicate_insert2.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table dest1_din2(key int, value string) partitioned by (ds string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1_din2 -FAILED: Error in semantic analysis: The same output cannot be present multiple times: dest1_din2@ds=1 +FAILED: SemanticException [Error 10087]: The same output cannot be present multiple times: dest1_din2@ds=1 Index: ql/src/test/results/clientnegative/clustern4.q.out =================================================================== --- ql/src/test/results/clientnegative/clustern4.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/clustern4.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:50 
Invalid table alias or column reference 'key': (possible column names are: _col0, _col1) +FAILED: SemanticException [Error 10004]: Line 2:50 Invalid table alias or column reference 'key': (possible column names are: _col0, _col1) Index: ql/src/test/results/clientnegative/invalid_cast_to_binary_2.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_to_binary_2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_to_binary_2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. +FAILED: SemanticException Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. Index: ql/src/test/results/clientnegative/udf_if_not_bool.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_if_not_bool.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_if_not_bool.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:10 Argument type mismatch ''STRING'': The first argument of function IF should be "boolean", but "string" is found +FAILED: SemanticException [Error 10016]: Line 1:10 Argument type mismatch ''STRING'': The first argument of function IF should be "boolean", but "string" is found Index: ql/src/test/results/clientnegative/regex_col_2.q.out =================================================================== --- ql/src/test/results/clientnegative/regex_col_2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/regex_col_2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:7 Invalid column reference '`.a.`' +FAILED: SemanticException [Error 10002]: Line 2:7 Invalid column reference '`.a.`' Index: ql/src/test/results/clientnegative/analyze.q.out =================================================================== --- ql/src/test/results/clientnegative/analyze.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/analyze.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Table is partitioned and partition specification is needed +FAILED: SemanticException [Error 10115]: Table is partitioned and partition specification is needed Index: ql/src/test/results/clientnegative/join2.q.out =================================================================== --- ql/src/test/results/clientnegative/join2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/join2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: MAPJOIN cannot be performed with OUTER JOIN +FAILED: SemanticException [Error 10057]: MAPJOIN cannot be performed with OUTER JOIN Index: ql/src/test/results/clientnegative/udf_size_wrong_type.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_size_wrong_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_size_wrong_type.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:12 Argument type mismatch ''wrong type: string'': "map" or "list" is expected at function SIZE, but "string" is found +FAILED: SemanticException [Error 10016]: Line 1:12 Argument type mismatch ''wrong type: string'': "map" or "list" is expected at function SIZE, but "string" is found Index: ql/src/test/results/clientnegative/create_view_failure3.q.out 
=================================================================== --- ql/src/test/results/clientnegative/create_view_failure3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_view_failure3.q.out (working copy) @@ -2,4 +2,4 @@ PREHOOK: type: DROPVIEW POSTHOOK: query: DROP VIEW xxx13 POSTHOOK: type: DROPVIEW -FAILED: Error in semantic analysis: 5:16 The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW. Error encountered near token 'key' +FAILED: SemanticException 5:16 The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW. Error encountered near token 'key' Index: ql/src/test/results/clientnegative/archive_insert4.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_insert4.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_insert4.q.out (working copy) @@ -31,4 +31,4 @@ POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: Insert conflict with existing archive: ds=2008-04-08/hr=12 +FAILED: SemanticException Insert conflict with existing archive: ds=2008-04-08/hr=12 Index: ql/src/test/results/clientnegative/udf_map_keys_arg_type.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_map_keys_arg_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_map_keys_arg_type.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:16 Argument type mismatch '3': "map" is expected at function MAP_KEYS, but "array" is found +FAILED: SemanticException [Error 10016]: Line 1:16 Argument type mismatch '3': "map" is expected at function MAP_KEYS, but "array" is found Index: ql/src/test/results/clientnegative/load_wrong_noof_part.q.out =================================================================== --- ql/src/test/results/clientnegative/load_wrong_noof_part.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/load_wrong_noof_part.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE loadpart1(a STRING, b STRING) PARTITIONED BY (ds STRING,ds1 STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@loadpart1 -FAILED: Error in semantic analysis: Line 2:79 Partition not found ''2009-05-05'' +FAILED: SemanticException [Error 10006]: Line 2:79 Partition not found ''2009-05-05'' Index: ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_02_all_part_over_overlap.q.out (working copy) @@ -99,4 +99,4 @@ POSTHOOK: type: LOAD POSTHOOK: Output: importer@exim_employee POSTHOOK: Output: importer@exim_employee@emp_country=us/emp_state=ka -FAILED: Error in semantic analysis: Partition already exists emp_country=us,emp_state=ka +FAILED: SemanticException [Error 10118]: Partition already exists emp_country=us,emp_state=ka Index: ql/src/test/results/clientnegative/archive5.q.out 
=================================================================== --- ql/src/test/results/clientnegative/archive5.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive5.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Partition value contains a reserved substring (User value: 14_INTERMEDIATE_ORIGINAL Reserved substring: _INTERMEDIATE_ORIGINAL) +FAILED: SemanticException [Error 10111]: Partition value contains a reserved substring (User value: 14_INTERMEDIATE_ORIGINAL Reserved substring: _INTERMEDIATE_ORIGINAL) Index: ql/src/test/results/clientnegative/udf_array_contains_wrong2.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_array_contains_wrong2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_array_contains_wrong2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:38 Argument type mismatch ''2'': "int" expected at function ARRAY_CONTAINS, but "string" is found +FAILED: SemanticException [Error 10016]: Line 2:38 Argument type mismatch ''2'': "int" expected at function ARRAY_CONTAINS, but "string" is found Index: ql/src/test/results/clientnegative/wrong_column_type.q.out =================================================================== --- ql/src/test/results/clientnegative/wrong_column_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/wrong_column_type.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(a float) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) +FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) Index: ql/src/test/results/clientnegative/udf_locate_wrong_type.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_locate_wrong_type.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_locate_wrong_type.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:22 Argument type mismatch 'lintstring': The 2nd argument of function LOCATE is expected to a primitive type, but list is found +FAILED: SemanticException [Error 10016]: Line 2:22 Argument type mismatch 'lintstring': The 2nd argument of function LOCATE is expected to a primitive type, but list is found Index: ql/src/test/results/clientnegative/udf_max.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_max.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_max.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Cannot support comparison of map<> type or complex type containing map<>. +FAILED: UDFArgumentTypeException Cannot support comparison of map<> type or complex type containing map<>. 
Index: ql/src/test/results/clientnegative/protectmode_part2.q.out =================================================================== --- ql/src/test/results/clientnegative/protectmode_part2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/protectmode_part2.q.out (working copy) @@ -33,4 +33,4 @@ POSTHOOK: Input: default@tbl_protectmode6 POSTHOOK: Input: default@tbl_protectmode6@p=p1 POSTHOOK: Output: default@tbl_protectmode6@p=p1 -FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: Query against an offline table or partition tbl_protectmode6:p=p1 +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Query against an offline table or partition tbl_protectmode6:p=p1 Index: ql/src/test/results/clientnegative/udf_printf_wrong2.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_printf_wrong2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_printf_wrong2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:14 Argument type mismatch '100': Argument 1 of function PRINTF must be "string", but "int" was found. +FAILED: SemanticException [Error 10016]: Line 2:14 Argument type mismatch '100': Argument 1 of function PRINTF must be "string", but "int" was found. Index: ql/src/test/results/clientnegative/show_tables_bad1.q.out =================================================================== --- ql/src/test/results/clientnegative/show_tables_bad1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/show_tables_bad1.q.out (working copy) @@ -1,2 +1,2 @@ -FAILED: Parse Error: line 1:12 mismatched input '' expecting set null in Identifier for show statement +FAILED: ParseException line 1:12 mismatched input '' expecting set null in Identifier for show statement Index: ql/src/test/results/clientnegative/archive_partspec3.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_partspec3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_partspec3.q.out (working copy) @@ -21,5 +21,5 @@ POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Parse Error: line 3:48 mismatched input ')' expecting Identifier near '(' in archive statement +FAILED: ParseException line 3:48 mismatched input ')' expecting Identifier near '(' in archive statement Index: ql/src/test/results/clientnegative/minimr_broken_pipe.q.out =================================================================== --- ql/src/test/results/clientnegative/minimr_broken_pipe.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/minimr_broken_pipe.q.out (working copy) @@ -3,4 +3,4 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 20003 from org.apache.hadoop.hive.ql.exec.MapRedTask. An error occurred when trying to close the Operator running your custom script. 
Index: ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out
===================================================================
--- ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/exim_08_nonpart_noncompat_serde.q.out (working copy)
@@ -48,4 +48,4 @@
 tblproperties("creator"="krishna")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: importer@exim_department
-FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table Serde class does not match
+FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Table Serde class does not match
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp)
Index: ql/src/test/results/clientnegative/input2.q.out
===================================================================
--- ql/src/test/results/clientnegative/input2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/input2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:7 Invalid table alias or column reference 'a': (possible column names are: key, value)
+FAILED: SemanticException [Error 10004]: Line 1:7 Invalid table alias or column reference 'a': (possible column names are: key, value)
Index: ql/src/test/results/clientnegative/udtf_explode_not_supported1.q.out
===================================================================
--- ql/src/test/results/clientnegative/udtf_explode_not_supported1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udtf_explode_not_supported1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: GROUP BY is not supported with a UDTF in the SELECT clause
+FAILED: SemanticException [Error 10077]: GROUP BY is not supported with a UDTF in the SELECT clause
Index: ql/src/test/results/clientnegative/fs_default_name1.q.out
===================================================================
--- ql/src/test/results/clientnegative/fs_default_name1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/fs_default_name1.q.out (working copy)
@@ -1,4 +1 @@
-FAILED: Hive Internal Error: java.lang.IllegalArgumentException(null)
-java.lang.IllegalArgumentException
-#### A masked pattern was here ####
-
+FAILED: IllegalArgumentException null
Index: ql/src/test/results/clientnegative/invalid_variance_syntax.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_variance_syntax.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_variance_syntax.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid.
+FAILED: SemanticException The specified syntax for UDAF invocation is invalid.
Index: ql/src/test/results/clientnegative/nonkey_groupby.q.out
===================================================================
--- ql/src/test/results/clientnegative/nonkey_groupby.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/nonkey_groupby.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:15 Expression not in GROUP BY key 'key'
+FAILED: SemanticException [Error 10025]: Line 1:15 Expression not in GROUP BY key 'key'
Index: ql/src/test/results/clientnegative/create_or_replace_view5.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_or_replace_view5.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/create_or_replace_view5.q.out (working copy)
@@ -13,4 +13,4 @@
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@v
#### A masked pattern was here ####
-FAILED: Error in semantic analysis: Can't combine IF NOT EXISTS and OR REPLACE.
+FAILED: SemanticException Can't combine IF NOT EXISTS and OR REPLACE.
Index: ql/src/test/results/clientnegative/fileformat_void_output.q.out
===================================================================
--- ql/src/test/results/clientnegative/fileformat_void_output.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/fileformat_void_output.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
+FAILED: SemanticException [Error 10055]: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
Index: ql/src/test/results/clientnegative/invalidate_view1.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalidate_view1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalidate_view1.q.out (working copy)
@@ -41,7 +41,7 @@
 POSTHOOK: type: ALTERTABLE_REPLACECOLS
 POSTHOOK: Input: default@xxx10
 POSTHOOK: Output: default@xxx10
-FAILED: Error in semantic analysis: Line 1:30 Invalid column reference '`value`' in definition of VIEW xxx9 [
+FAILED: SemanticException Line 1:30 Invalid column reference '`value`' in definition of VIEW xxx9 [
 SELECT `xxx10`.`key`, `xxx10`.`value` FROM `default`.`xxx10`
 ] used as xxx at Line 1:39 in definition of VIEW xxx8 [
 SELECT `xxx`.`key`, `xxx`.`value` FROM `default`.`xxx9` `xxx`
Index: ql/src/test/results/clientnegative/line_terminator.q.out
===================================================================
--- ql/src/test/results/clientnegative/line_terminator.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/line_terminator.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: 3:20 LINES TERMINATED BY only supports newline '\n' right now. Error encountered near token '',''
+FAILED: SemanticException 3:20 LINES TERMINATED BY only supports newline '\n' right now. Error encountered near token '',''
Index: ql/src/test/results/clientnegative/index_bitmap_no_map_aggr.q.out
===================================================================
--- ql/src/test/results/clientnegative/index_bitmap_no_map_aggr.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/index_bitmap_no_map_aggr.q.out (working copy)
@@ -19,4 +19,4 @@
 POSTHOOK: query: CREATE INDEX src1_index ON TABLE src(key) as 'BITMAP' WITH DEFERRED REBUILD
 POSTHOOK: type: CREATEINDEX
 POSTHOOK: Output: default@default__src_src1_index__
-FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: org.apache.hadoop.hive.ql.metadata.HiveException: Cannot construct index without map-side aggregation
+FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: org.apache.hadoop.hive.ql.metadata.HiveException: Cannot construct index without map-side aggregation
Index: ql/src/test/results/clientnegative/invalid_create_tbl2.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_create_tbl2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_create_tbl2.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 1:7 Failed to recognize predicate 'tabl'. Failed rule: 'kwRole' in create role
+FAILED: ParseException line 1:7 Failed to recognize predicate 'tabl'. Failed rule: 'kwRole' in create role
Index: ql/src/test/results/clientnegative/semijoin4.q.out
===================================================================
--- ql/src/test/results/clientnegative/semijoin4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/semijoin4.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:112 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1)
+FAILED: SemanticException [Error 10004]: Line 2:112 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1)
Index: ql/src/test/results/clientnegative/dyn_part4.q.out
===================================================================
--- ql/src/test/results/clientnegative/dyn_part4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/dyn_part4.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table nzhang_part4 (key string) partitioned by (ds string, hr string, value string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@nzhang_part4
-FAILED: Error in semantic analysis: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr, value), while the partitions specified in the query are: (value, ds, hr).
+FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr, value), while the partitions specified in the query are: (value, ds, hr).
Index: ql/src/test/results/clientnegative/fileformat_bad_class.q.out
===================================================================
--- ql/src/test/results/clientnegative/fileformat_bad_class.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/fileformat_bad_class.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
+FAILED: SemanticException [Error 10055]: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
Index: ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out
===================================================================
--- ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/smb_bucketmapjoin.q.out (working copy)
@@ -34,4 +34,4 @@
 POSTHOOK: Lineage: smb_bucket4_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: smb_bucket4_2.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: smb_bucket4_2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-FAILED: Error in semantic analysis: MAPJOIN cannot be performed with OUTER JOIN
+FAILED: SemanticException [Error 10057]: MAPJOIN cannot be performed with OUTER JOIN
Index: ql/src/test/results/clientnegative/create_view_failure9.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_view_failure9.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/create_view_failure9.q.out (working copy)
@@ -2,4 +2,4 @@
 PREHOOK: type: DROPVIEW
 POSTHOOK: query: DROP VIEW xxx18
 POSTHOOK: type: DROPVIEW
-FAILED: Error in semantic analysis: Rightmost columns in view output do not match PARTITIONED ON clause
+FAILED: SemanticException [Error 10093]: Rightmost columns in view output do not match PARTITIONED ON clause
Index: ql/src/test/results/clientnegative/having1.q.out
===================================================================
--- ql/src/test/results/clientnegative/having1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/having1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: HAVING specified without GROUP BY
+FAILED: SemanticException HAVING specified without GROUP BY
Index: ql/src/test/results/clientnegative/uniquejoin2.q.out
===================================================================
--- ql/src/test/results/clientnegative/uniquejoin2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/uniquejoin2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Tables with different or invalid number of keys in UNIQUEJOIN
+FAILED: SemanticException Tables with different or invalid number of keys in UNIQUEJOIN
Index: ql/src/test/results/clientnegative/regex_col_groupby.q.out
===================================================================
--- ql/src/test/results/clientnegative/regex_col_groupby.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/regex_col_groupby.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:44 Invalid table alias or column reference '`..`': (possible column names are: key, value, ds, hr)
+FAILED: SemanticException [Error 10004]: Line 2:44 Invalid table alias or column reference '`..`': (possible column names are: key, value, ds, hr)
Index: ql/src/test/results/clientnegative/groupby3_multi_distinct.q.out
===================================================================
--- ql/src/test/results/clientnegative/groupby3_multi_distinct.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/groupby3_multi_distinct.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE, c6 DOUBLE, c7 DOUBLE, c8 DOUBLE, c9 DOUBLE, c10 DOUBLE, c11 DOUBLE) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
-FAILED: Error in semantic analysis: DISTINCT on different columns not supported with skew in data
+FAILED: SemanticException [Error 10022]: DISTINCT on different columns not supported with skew in data
Index: ql/src/test/results/clientnegative/invalid_cast_to_binary_3.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_to_binary_3.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_cast_to_binary_3.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types.
+FAILED: SemanticException Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types.
Index: ql/src/test/results/clientnegative/select_charliteral.q.out
===================================================================
--- ql/src/test/results/clientnegative/select_charliteral.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/select_charliteral.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 3:11 mismatched input ',' expecting \' near '_c17' in character string literal
+FAILED: ParseException line 3:11 mismatched input ',' expecting \' near '_c17' in character string literal
Index: ql/src/test/results/clientnegative/udf_locate_wrong_args_len.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_locate_wrong_args_len.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_locate_wrong_args_len.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:7 Arguments length mismatch '2': The function LOCATE accepts exactly 2 or 3 arguments.
+FAILED: SemanticException [Error 10015]: Line 1:7 Arguments length mismatch '2': The function LOCATE accepts exactly 2 or 3 arguments.
Index: ql/src/test/results/clientnegative/analyze_view.q.out
===================================================================
--- ql/src/test/results/clientnegative/analyze_view.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/analyze_view.q.out (working copy)
@@ -9,4 +9,4 @@
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@av
#### A masked pattern was here ####
-FAILED: Error in semantic analysis: ANALYZE is not supported for views
+FAILED: SemanticException [Error 10091]: ANALYZE is not supported for views
Index: ql/src/test/results/clientnegative/subq_insert.q.out
===================================================================
--- ql/src/test/results/clientnegative/subq_insert.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/subq_insert.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:38 Cannot insert in a subquery. Inserting to table 'src1'
+FAILED: SemanticException [Error 10024]: Line 2:38 Cannot insert in a subquery. Inserting to table 'src1'
Index: ql/src/test/results/clientnegative/dyn_part_merge.q.out
===================================================================
--- ql/src/test/results/clientnegative/dyn_part_merge.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/dyn_part_merge.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table dyn_merge(key string, value string) partitioned by (ds string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dyn_merge
-FAILED: Error in semantic analysis: Dynamic partition does not support merging using non-CombineHiveInputFormatPlease check your hive.input.format setting and make sure your Hadoop version support CombineFileInputFormat
+FAILED: SemanticException [Error 10097]: Dynamic partition does not support merging using non-CombineHiveInputFormat. Please check your hive.input.format setting and make sure your Hadoop version support CombineFileInputFormat
Index: ql/src/test/results/clientnegative/invalid_t_transform.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_t_transform.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_t_transform.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
+FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead
Index: ql/src/test/results/clientnegative/protectmode_tbl1.q.out
===================================================================
--- ql/src/test/results/clientnegative/protectmode_tbl1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/protectmode_tbl1.q.out (working copy)
@@ -27,4 +27,4 @@
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl_protectmode_1
 POSTHOOK: Output: default@tbl_protectmode_1
-FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_1
+FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode_1
Index: ql/src/test/results/clientnegative/udf_when_type_wrong.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_when_type_wrong.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_when_type_wrong.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 3:13 Argument type mismatch ''1'': "boolean" is expected after WHEN, but "string" is found
+FAILED: SemanticException [Error 10016]: Line 3:13 Argument type mismatch ''1'': "boolean" is expected after WHEN, but "string" is found
Index: ql/src/test/results/clientnegative/compare_double_bigint.q.out
===================================================================
--- ql/src/test/results/clientnegative/compare_double_bigint.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/compare_double_bigint.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '1.0': In strict mode, comparing bigints and doubles is not allowed, it may result in a loss of precision. If you really want to perform the operation, set hive.mapred.mode=nonstrict
+FAILED: SemanticException Line 0:-1 Wrong arguments '1.0': In strict mode, comparing bigints and doubles is not allowed, it may result in a loss of precision. If you really want to perform the operation, set hive.mapred.mode=nonstrict
Index: ql/src/test/results/clientnegative/split_sample_wrong_format.q.out
===================================================================
--- ql/src/test/results/clientnegative/split_sample_wrong_format.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/split_sample_wrong_format.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: 3:32 Percentage sampling is not supported in org.apache.hadoop.hive.ql.io.HiveInputFormat. Error encountered near token '1'
+FAILED: SemanticException 3:32 Percentage sampling is not supported in org.apache.hadoop.hive.ql.io.HiveInputFormat. Error encountered near token '1'
Index: ql/src/test/results/clientnegative/notable_alias3.q.out
===================================================================
--- ql/src/test/results/clientnegative/notable_alias3.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/notable_alias3.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: CREATE TABLE dest1(key INT, value DOUBLE) STORED AS TEXTFILE
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
-FAILED: Error in semantic analysis: Line 4:44 Expression not in GROUP BY key 'key'
+FAILED: SemanticException [Error 10025]: Line 4:44 Expression not in GROUP BY key 'key'
Index: ql/src/test/results/clientnegative/sample.q.out
===================================================================
--- ql/src/test/results/clientnegative/sample.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/sample.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Numberator should not be bigger than denaminator in sample clause for table srcbucket
+FAILED: SemanticException [Error 10061]: Numberator should not be bigger than denaminator in sample clause for table srcbucket
Index: ql/src/test/results/clientnegative/select_udtf_alias.q.out
===================================================================
--- ql/src/test/results/clientnegative/select_udtf_alias.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/select_udtf_alias.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 3:49 mismatched input 'LIMIT' expecting FROM near ')' in from clause
+FAILED: ParseException line 3:49 mismatched input 'LIMIT' expecting FROM near ')' in from clause
Index: ql/src/test/results/clientnegative/create_insert_outputformat.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_insert_outputformat.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/create_insert_outputformat.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
+FAILED: SemanticException [Error 10055]: Output Format must implement HiveOutputFormat, otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat
Index: ql/src/test/results/clientnegative/udaf_invalid_place.q.out
===================================================================
--- ql/src/test/results/clientnegative/udaf_invalid_place.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udaf_invalid_place.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:21 Not yet supported place for UDAF 'sum'
+FAILED: SemanticException [Error 10128]: Line 1:21 Not yet supported place for UDAF 'sum'
Index: ql/src/test/results/clientnegative/udf_printf_wrong3.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_printf_wrong3.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_printf_wrong3.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:32 Argument type mismatch '"argument"': Argument 2 of function PRINTF must be "PRIMITIVE", but "array" was found.
+FAILED: SemanticException [Error 10016]: Line 2:32 Argument type mismatch '"argument"': Argument 2 of function PRINTF must be "PRIMITIVE", but "array" was found.
Index: ql/src/test/results/clientnegative/show_tables_bad2.q.out
===================================================================
--- ql/src/test/results/clientnegative/show_tables_bad2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/show_tables_bad2.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 1:25 mismatched input '' expecting set null in Identifier for show statement
+FAILED: ParseException line 1:25 mismatched input '' expecting set null in Identifier for show statement
Index: ql/src/test/results/clientnegative/archive_partspec4.q.out
===================================================================
--- ql/src/test/results/clientnegative/archive_partspec4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/archive_partspec4.q.out (working copy)
@@ -21,4 +21,4 @@
 POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-FAILED: Error in semantic analysis: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (hr, ds).
+FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (hr, ds).
Index: ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_case_type_wrong2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 3:20 Argument type mismatch '4': The expressions after THEN should have the same type: "string" is expected but "int" is found
+FAILED: SemanticException [Error 10016]: Line 3:20 Argument type mismatch '4': The expressions after THEN should have the same type: "string" is expected but "int" is found
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp)
Index: ql/src/test/results/clientnegative/udf_sort_array_wrong1.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_sort_array_wrong1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_sort_array_wrong1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:7 Arguments length mismatch '3': The function SORT_ARRAY(array(obj1, obj2,...)) needs one argument.
+FAILED: SemanticException [Error 10015]: Line 2:7 Arguments length mismatch '3': The function SORT_ARRAY(array(obj1, obj2,...)) needs one argument.
Index: ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out
===================================================================
--- ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/alter_concatenate_indexed_table.q.out (working copy)
@@ -66,4 +66,4 @@
 POSTHOOK: query: show indexes on src_rc_concatenate_test
 POSTHOOK: type: SHOWINDEXES
 src_rc_concatenate_test_index src_rc_concatenate_test key default__src_rc_concatenate_test_src_rc_concatenate_test_index__ compact
-FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: can not do merge because source table src_rc_concatenate_test is indexed.
+FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: can not do merge because source table src_rc_concatenate_test is indexed.
Index: ql/src/test/results/clientnegative/udtf_explode_not_supported2.q.out
===================================================================
--- ql/src/test/results/clientnegative/udtf_explode_not_supported2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udtf_explode_not_supported2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 2 aliases but got 3
+FAILED: SemanticException [Error 10083]: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 2 aliases but got 3
Index: ql/src/test/results/clientnegative/fs_default_name2.q.out
===================================================================
--- ql/src/test/results/clientnegative/fs_default_name2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/fs_default_name2.q.out (working copy)
@@ -1,4 +1 @@
-FAILED: Hive Internal Error: java.lang.IllegalArgumentException(null)
-java.lang.IllegalArgumentException
-#### A masked pattern was here ####
-
+FAILED: IllegalArgumentException null
Index: ql/src/test/results/clientnegative/invalid_max_syntax.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_max_syntax.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_max_syntax.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid.
+FAILED: SemanticException The specified syntax for UDAF invocation is invalid.
Index: ql/src/test/results/clientnegative/drop_table_failure1.q.out
===================================================================
--- ql/src/test/results/clientnegative/drop_table_failure1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/drop_table_failure1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Table not found UnknownTable
+FAILED: SemanticException [Error 10001]: Table not found UnknownTable
Index: ql/src/test/results/clientnegative/invalid_select_expression.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_select_expression.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_select_expression.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 1:32 cannot recognize input near '.' 'foo' '' in expression specification
+FAILED: ParseException line 1:32 cannot recognize input near '.' 'foo' '' in expression specification
Index: ql/src/test/results/clientnegative/invalid_std_syntax.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_std_syntax.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_std_syntax.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid.
+FAILED: SemanticException The specified syntax for UDAF invocation is invalid.
Index: ql/src/test/results/clientnegative/create_or_replace_view6.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_or_replace_view6.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/create_or_replace_view6.q.out (working copy)
@@ -13,5 +13,5 @@
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@v
#### A masked pattern was here ####
-FAILED: Parse Error: line 2:52 cannot recognize input near 'blah' '' '' in select clause
+FAILED: ParseException line 2:52 cannot recognize input near 'blah' '' '' in select clause
Index: ql/src/test/results/clientnegative/input_part0_neg.q.out
===================================================================
--- ql/src/test/results/clientnegative/input_part0_neg.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/input_part0_neg.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: No partition predicate found for Alias "x" Table "srcpart"
+FAILED: SemanticException [Error 10041]: No partition predicate found for Alias "x" Table "srcpart"
Index: ql/src/test/results/clientnegative/insert_view_failure.q.out
===================================================================
--- ql/src/test/results/clientnegative/insert_view_failure.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/insert_view_failure.q.out (working copy)
@@ -9,4 +9,4 @@
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Output: default@xxx2
#### A masked pattern was here ####
-FAILED: Error in semantic analysis: A view cannot be used as target table for LOAD or INSERT
+FAILED: SemanticException [Error 10090]: A view cannot be used as target table for LOAD or INSERT
Index: ql/src/test/results/clientnegative/udf_instr_wrong_type.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_instr_wrong_type.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_instr_wrong_type.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:21 Argument type mismatch 'lintstring': The 2nd argument of function INSTR is expected to a primitive type, but list is found
+FAILED: SemanticException [Error 10016]: Line 2:21 Argument type mismatch 'lintstring': The 2nd argument of function INSTR is expected to a primitive type, but list is found
Index: ql/src/test/results/clientnegative/load_part_nospec.q.out
===================================================================
--- ql/src/test/results/clientnegative/load_part_nospec.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/load_part_nospec.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@hive_test_src
-FAILED: Error in semantic analysis: Need to specify partition columns because the destination table is partitioned
+FAILED: SemanticException [Error 10062]: Need to specify partition columns because the destination table is partitioned
Index: ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out
===================================================================
--- ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/exim_17_part_spec_underspec.q.out (working copy)
@@ -77,4 +77,4 @@
 PREHOOK: type: SWITCHDATABASE
 POSTHOOK: query: use importer
 POSTHOOK: type: SWITCHDATABASE
-FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory
+FAILED: SemanticException [Error 10006]: Partition not found - Specified partition not found in import directory
Index: ql/src/test/results/clientnegative/udf_map_values_arg_type.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_map_values_arg_type.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_map_values_arg_type.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:18 Argument type mismatch '4': "map" is expected at function MAP_VALUES, but "array" is found
+FAILED: SemanticException [Error 10016]: Line 1:18 Argument type mismatch '4': "map" is expected at function MAP_VALUES, but "array" is found
Index: ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out
===================================================================
--- ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/exim_11_nonpart_noncompat_sorting.q.out (working copy)
@@ -50,4 +50,4 @@
 tblproperties("creator"="krishna")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: importer@exim_department
-FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table sorting spec does not match
+FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Table sorting spec does not match
Index: ql/src/test/results/clientnegative/lateral_view_alias.q.out
===================================================================
--- ql/src/test/results/clientnegative/lateral_view_alias.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/lateral_view_alias.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 1 aliases but got 2
+FAILED: SemanticException [Error 10083]: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 1 aliases but got 2
Index: ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out
===================================================================
--- ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/exim_09_nonpart_noncompat_serdeparam.q.out (working copy)
@@ -56,4 +56,4 @@
 tblproperties("creator"="krishna")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: importer@exim_department
-FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table Serde format does not match
+FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Table Serde format does not match
Index: ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out
===================================================================
--- ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/insertover_dynapart_ifnotexists.q.out (working copy)
@@ -15,4 +15,4 @@
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@srcpart_dp
 POSTHOOK: Output: default@srcpart_dp@ds=2008-04-08/hr=11
-FAILED: Error in semantic analysis: Dynamic partitions do not support IF NOT EXISTS. Specified partitions with value : {ds=2008-04-08}
+FAILED: SemanticException [Error 10127]: Dynamic partitions do not support IF NOT EXISTS. Specified partitions with value : {ds=2008-04-08}
Index: ql/src/test/results/clientnegative/udf_map_values_arg_num.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_map_values_arg_num.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_map_values_arg_num.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:7 Arguments length mismatch '"2"': The function MAP_VALUES only accepts 1 argument.
+FAILED: SemanticException [Error 10015]: Line 1:7 Arguments length mismatch '"2"': The function MAP_VALUES only accepts 1 argument.
Index: ql/src/test/results/clientnegative/udtf_not_supported1.q.out
===================================================================
--- ql/src/test/results/clientnegative/udtf_not_supported1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udtf_not_supported1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: 1:39 Only a single expression in the SELECT clause is supported with UDTF's. Error encountered near token 'key'
+FAILED: SemanticException 1:39 Only a single expression in the SELECT clause is supported with UDTF's. Error encountered near token 'key'
Index: ql/src/test/results/clientnegative/uniquejoin3.q.out
===================================================================
--- ql/src/test/results/clientnegative/uniquejoin3.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/uniquejoin3.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 1:54 required (...)+ loop did not match anything at input 'JOIN' in statement
+FAILED: ParseException line 1:54 required (...)+ loop did not match anything at input 'JOIN' in statement
Index: ql/src/test/results/clientnegative/udf_concat_ws_wrong1.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_concat_ws_wrong1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_concat_ws_wrong1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:7 Arguments length mismatch ''-'': The function CONCAT_WS(separator,[string | array(string)]+) needs at least two arguments.
+FAILED: SemanticException [Error 10015]: Line 2:7 Arguments length mismatch ''-'': The function CONCAT_WS(separator,[string | array(string)]+) needs at least two arguments.
Index: ql/src/test/results/clientnegative/local_mapred_error_cache.q.out
===================================================================
--- ql/src/test/results/clientnegative/local_mapred_error_cache.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/local_mapred_error_cache.q.out (working copy)
@@ -13,11 +13,11 @@
#### A masked pattern was here ####
 ID: Stage-1
-org.apache.hadoop.hive.ql.metadata.HiveException: Hit error while closing ..
+org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
#### A masked pattern was here ####
-org.apache.hadoop.hive.ql.metadata.HiveException: Hit error while closing ..
+org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
#### A masked pattern was here ####
-org.apache.hadoop.hive.ql.metadata.HiveException: Hit error while closing ..
+org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20003]: An error occurred when trying to close the Operator running your custom script.
#### A masked pattern was here ####
 Ended Job = job_local_0001 with errors
 Error during job, obtaining debugging information...
Index: ql/src/test/results/clientnegative/drop_view_failure2.q.out
===================================================================
--- ql/src/test/results/clientnegative/drop_view_failure2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/drop_view_failure2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Table not found UnknownView
+FAILED: SemanticException [Error 10001]: Table not found UnknownView
Index: ql/src/test/results/clientnegative/invalid_cast_to_binary_4.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_to_binary_4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_cast_to_binary_4.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types.
+FAILED: SemanticException Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types.
Index: ql/src/test/results/clientnegative/protectmode_tbl2.q.out
===================================================================
--- ql/src/test/results/clientnegative/protectmode_tbl2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/protectmode_tbl2.q.out (working copy)
@@ -50,4 +50,4 @@
 p string
#### A masked pattern was here ####
-FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode2
+FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode2
Index: ql/src/test/results/clientnegative/create_udaf_failure.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_udaf_failure.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/create_udaf_failure.q.out (working copy)
@@ -2,4 +2,4 @@
 PREHOOK: type: CREATEFUNCTION
 POSTHOOK: query: CREATE TEMPORARY FUNCTION test_udaf AS 'org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase'
 POSTHOOK: type: CREATEFUNCTION
-FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException: public boolean org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase$UDAFWrongArgLengthForTestCaseEvaluator.merge() requires 0 arguments but 1 are passed in.
+FAILED: SemanticException org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException: public boolean org.apache.hadoop.hive.ql.udf.UDAFWrongArgLengthForTestCase$UDAFWrongArgLengthForTestCaseEvaluator.merge() requires 0 arguments but 1 are passed in.
Index: ql/src/test/results/clientnegative/invalid_sum_syntax.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_sum_syntax.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_sum_syntax.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid.
+FAILED: SemanticException The specified syntax for UDAF invocation is invalid.
Index: ql/src/test/results/clientnegative/notable_alias4.q.out
===================================================================
--- ql/src/test/results/clientnegative/notable_alias4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/notable_alias4.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries
+FAILED: SemanticException Column key Found in more than One Tables/Subqueries
Index: ql/src/test/results/clientnegative/invalid_tbl_name.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_tbl_name.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_tbl_name.q.out (working copy)
@@ -1,2 +1,2 @@
-FAILED: Parse Error: line 1:20 cannot recognize input near '-' 'name' '(' in create table statement
+FAILED: ParseException line 1:20 cannot recognize input near '-' 'name' '(' in create table statement
Index: ql/src/test/results/clientnegative/create_view_failure5.q.out
===================================================================
--- ql/src/test/results/clientnegative/create_view_failure5.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/create_view_failure5.q.out (working copy)
@@ -2,4 +2,4 @@
 PREHOOK: type: DROPVIEW
 POSTHOOK: query: DROP VIEW xxx14
 POSTHOOK: type: DROPVIEW
-FAILED: Error in semantic analysis: Duplicate column name: key
+FAILED: SemanticException [Error 10036]: Duplicate column name: key
Index: ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out
===================================================================
--- ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/exim_03_nonpart_noncompat_colschema.q.out (working copy)
@@ -46,4 +46,4 @@
 tblproperties("creator"="krishna")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: importer@exim_department
-FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match
+FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Column Schema does not match
Index: ql/src/test/results/clientnegative/union2.q.out
===================================================================
--- ql/src/test/results/clientnegative/union2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/union2.q.out (working copy)
@@ -8,4 +8,4 @@
 POSTHOOK: query: create table if not exists union2_t2(s string, c string, v string)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@union2_t2
-FAILED: Error in semantic analysis: 8:47 Schema of both sides of union should match: Column v is of type array on first table and type double on second table. Error encountered near token 'union2_t2'
+FAILED: SemanticException 8:47 Schema of both sides of union should match: Column v is of type array on first table and type double on second table. Error encountered near token 'union2_t2'
Index: ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out
===================================================================
--- ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/exim_13_nonnative_import.q.out (working copy)
@@ -46,4 +46,4 @@
 tblproperties("creator"="krishna")
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: importer@exim_department
-FAILED: Error in semantic analysis: Export/Import cannot be done for a non-native table.
+FAILED: SemanticException [Error 10121]: Export/Import cannot be done for a non-native table.
Index: ql/src/test/results/clientnegative/clustern1.q.out
===================================================================
--- ql/src/test/results/clientnegative/clustern1.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/clustern1.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries
+FAILED: SemanticException Column key Found in more than One Tables/Subqueries
Index: ql/src/test/results/clientnegative/strict_orderby.q.out
===================================================================
--- ql/src/test/results/clientnegative/strict_orderby.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/strict_orderby.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: 4:47 In strict mode, if ORDER BY is specified, LIMIT must also be specified. Error encountered near token 'key'
+FAILED: SemanticException 4:47 In strict mode, if ORDER BY is specified, LIMIT must also be specified. Error encountered near token 'key'
Index: ql/src/test/results/clientnegative/udf_printf_wrong4.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_printf_wrong4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_printf_wrong4.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:32 Argument type mismatch '"argument"': Argument 2 of function PRINTF must be "PRIMITIVE", but "array" was found.
+FAILED: SemanticException [Error 10016]: Line 2:32 Argument type mismatch '"argument"': Argument 2 of function PRINTF must be "PRIMITIVE", but "array" was found.
Index: ql/src/test/results/clientnegative/drop_partition_failure.q.out
===================================================================
--- ql/src/test/results/clientnegative/drop_partition_failure.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/drop_partition_failure.q.out (working copy)
@@ -31,4 +31,4 @@
 b=1/c=1
 b=1/c=2
 b=2/c=2
-FAILED: Error in semantic analysis: Partition not found b = '3'
+FAILED: SemanticException [Error 10006]: Partition not found b = '3'
Index: ql/src/test/results/clientnegative/index_compact_size_limit.q.out
===================================================================
--- ql/src/test/results/clientnegative/index_compact_size_limit.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/index_compact_size_limit.q.out (working copy)
@@ -32,7 +32,7 @@
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
#### A masked pattern was here ####
-Execution failed with exit status: 2
+Execution failed with exit status: 1
 Obtaining error information
 Task failed!
@@ -42,4 +42,4 @@
 Logs:
#### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask
Index: ql/src/test/results/clientnegative/archive_partspec5.q.out
===================================================================
--- ql/src/test/results/clientnegative/archive_partspec5.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/archive_partspec5.q.out (working copy)
@@ -21,4 +21,4 @@
 POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-FAILED: Error in semantic analysis: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr, min), while the partitions specified in the query are: (ds, min).
+FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr, min), while the partitions specified in the query are: (ds, min).
Index: ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_case_type_wrong3.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 4:13 Argument type mismatch '7': The expression after ELSE should have the same type as those after THEN: "string" is expected but "int" is found
+FAILED: SemanticException [Error 10016]: Line 4:13 Argument type mismatch '7': The expression after ELSE should have the same type as those after THEN: "string" is expected but "int" is found
Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out
===================================================================
--- ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out (working copy)
@@ -3,4 +3,4 @@
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(string) _FUNC_(timestamp)
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(string) _FUNC_(timestamp)
Index: ql/src/test/results/clientnegative/udf_sort_array_wrong2.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_sort_array_wrong2.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_sort_array_wrong2.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 2:18 Argument type mismatch '"Invalid"': Argument 1 of function SORT_ARRAY must be array, but string was found.
+FAILED: SemanticException [Error 10016]: Line 2:18 Argument type mismatch '"Invalid"': Argument 1 of function SORT_ARRAY must be array, but string was found.
Index: ql/src/test/results/clientnegative/input4.q.out
===================================================================
--- ql/src/test/results/clientnegative/input4.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/input4.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict
+FAILED: SemanticException [Error 10052]: In strict mode, cartesian product is not allowed. If you really want to perform the operation, set hive.mapred.mode=nonstrict
Index: ql/src/test/results/clientnegative/udf_instr_wrong_args_len.q.out
===================================================================
--- ql/src/test/results/clientnegative/udf_instr_wrong_args_len.q.out (revision 1336457)
+++ ql/src/test/results/clientnegative/udf_instr_wrong_args_len.q.out (working copy)
@@ -1 +1 @@
-FAILED: Error in semantic analysis: Line 1:7 Arguments length mismatch ''abcd'': The function INSTR accepts exactly 2 arguments.
+FAILED: SemanticException [Error 10015]: Line 1:7 Arguments length mismatch ''abcd'': The function INSTR accepts exactly 2 arguments.
Index: ql/src/test/results/clientnegative/archive_insert1.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_insert1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_insert1.q.out (working copy) @@ -31,4 +31,4 @@ POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: Insert conflict with existing archive: ds=2008-04-08/hr=12 +FAILED: SemanticException Insert conflict with existing archive: ds=2008-04-08/hr=12 Index: ql/src/test/results/clientnegative/udtf_explode_not_supported3.q.out =================================================================== --- ql/src/test/results/clientnegative/udtf_explode_not_supported3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udtf_explode_not_supported3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: explode() takes only one argument +FAILED: UDFArgumentException explode() takes only one argument Index: ql/src/test/results/clientnegative/groupby_key.q.out =================================================================== --- ql/src/test/results/clientnegative/groupby_key.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/groupby_key.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:7 Expression not in GROUP BY key 'value' +FAILED: SemanticException [Error 10025]: Line 1:7 Expression not in GROUP BY key 'value' Index: ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_18_part_spec_missing.q.out (working copy) @@ -77,4 +77,4 @@ PREHOOK: type: SWITCHDATABASE POSTHOOK: query: use importer POSTHOOK: type: SWITCHDATABASE -FAILED: Error in semantic analysis: Partition not found - Specified partition not found in import directory +FAILED: SemanticException [Error 10006]: Partition not found - Specified partition not found in import directory Index: ql/src/test/results/clientnegative/create_or_replace_view7.q.out =================================================================== --- ql/src/test/results/clientnegative/create_or_replace_view7.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_or_replace_view7.q.out (working copy) @@ -33,4 +33,4 @@ POSTHOOK: Input: default@v2 POSTHOOK: Output: default@v3 #### A masked pattern was here #### -FAILED: Error in semantic analysis: Recursive view default.v1 detected (cycle: default.v1 -> default.v3 -> default.v2 -> default.v1). +FAILED: SemanticException Recursive view default.v1 detected (cycle: default.v1 -> default.v3 -> default.v2 -> default.v1). 
Index: ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_05_nonpart_noncompat_coltype.q.out (working copy) @@ -46,4 +46,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Column Schema does not match Index: ql/src/test/results/clientnegative/insertexternal1.q.out =================================================================== --- ql/src/test/results/clientnegative/insertexternal1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/insertexternal1.q.out (working copy) @@ -10,4 +10,4 @@ POSTHOOK: type: ALTERTABLE_ADDPARTS POSTHOOK: Input: default@texternal POSTHOOK: Output: default@texternal@insertdate=2008-01-01 -FAILED: Error in semantic analysis: Inserting into a external table is not allowed texternal +FAILED: SemanticException [Error 10071]: Inserting into a external table is not allowed texternal Index: ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_00_unsupported_schema.q.out (working copy) @@ -13,4 +13,4 @@ POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department POSTHOOK: type: LOAD POSTHOOK: Output: default@exim_department -FAILED: Error in semantic analysis: Invalid path only the following file systems accepted for export/import : hdfs,pfile +FAILED: SemanticException Invalid path only the following file systems accepted for export/import : hdfs,pfile Index: ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_04_nonpart_noncompat_colnumber.q.out (working copy) @@ -46,4 +46,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Column Schema does not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Column Schema does not match Index: ql/src/test/results/clientnegative/recursive_view.q.out =================================================================== --- ql/src/test/results/clientnegative/recursive_view.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/recursive_view.q.out (working copy) @@ -84,4 +84,4 @@ POSTHOOK: Input: default@r3 POSTHOOK: Output: default@r0 POSTHOOK: Output: default@r3 -FAILED: Error in semantic analysis: Recursive view default.r0 detected (cycle: default.r0 -> default.r2 -> default.r1 -> default.r0). +FAILED: SemanticException Recursive view default.r0 detected (cycle: default.r0 -> default.r2 -> default.r1 -> default.r0). 
Index: ql/src/test/results/clientnegative/nopart_load.q.out =================================================================== --- ql/src/test/results/clientnegative/nopart_load.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/nopart_load.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE nopart_load(a STRING, b STRING) PARTITIONED BY (ds STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@nopart_load -FAILED: Error in semantic analysis: Need to specify partition columns because the destination table is partitioned +FAILED: SemanticException [Error 10062]: Need to specify partition columns because the destination table is partitioned Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table tbl (a binary) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@tbl -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) +FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(tinyint) _FUNC_(smallint) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) Index: ql/src/test/results/clientnegative/ddltime.q.out =================================================================== --- ql/src/test/results/clientnegative/ddltime.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/ddltime.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table T2 like srcpart POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@T2 -FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: 3:23 HOLD_DDLTIME hint cannot be applied to dynamic partitions or non-existent partitions. Error encountered near token ''1'' +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: 3:23 HOLD_DDLTIME hint cannot be applied to dynamic partitions or non-existent partitions. 
Error encountered near token ''1'' Index: ql/src/test/results/clientnegative/groupby2_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientnegative/groupby2_multi_distinct.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/groupby2_multi_distinct.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest_g2(key STRING, c1 INT, c2 STRING, c3 INT, c4 INT) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest_g2 -FAILED: Error in semantic analysis: DISTINCT on different columns not supported with skew in data +FAILED: SemanticException [Error 10022]: DISTINCT on different columns not supported with skew in data Index: ql/src/test/results/clientnegative/uniquejoin.q.out =================================================================== --- ql/src/test/results/clientnegative/uniquejoin.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/uniquejoin.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Subqueries are not supported in UNIQUEJOIN +FAILED: SemanticException Subqueries are not supported in UNIQUEJOIN Index: ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_size_wrong_args_len.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:7 Arguments length mismatch 'lintstring': The function SIZE only accepts 1 argument. +FAILED: SemanticException [Error 10015]: Line 2:7 Arguments length mismatch 'lintstring': The function SIZE only accepts 1 argument. Index: ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_when_type_wrong2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 3:22 Argument type mismatch '4': The expressions after THEN should have the same type: "string" is expected but "int" is found +FAILED: SemanticException [Error 10016]: Line 3:22 Argument type mismatch '4': The expressions after THEN should have the same type: "string" is expected but "int" is found Index: ql/src/test/results/clientnegative/clusterbyorderby.q.out =================================================================== --- ql/src/test/results/clientnegative/clusterbyorderby.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/clusterbyorderby.q.out (working copy) @@ -1,2 +1,2 @@ -FAILED: Parse Error: line 5:0 mismatched input 'ORDER' expecting EOF near 'tkey' +FAILED: ParseException line 5:0 mismatched input 'ORDER' expecting EOF near 'tkey' Index: ql/src/test/results/clientnegative/udtf_not_supported2.q.out =================================================================== --- ql/src/test/results/clientnegative/udtf_not_supported2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udtf_not_supported2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: 1:7 UDTF's require an AS clause. Error encountered near token '3' +FAILED: SemanticException 1:7 UDTF's require an AS clause. 
Error encountered near token '3' Index: ql/src/test/results/clientnegative/udf_concat_ws_wrong2.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_concat_ws_wrong2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_concat_ws_wrong2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:23 Argument type mismatch '50': Argument 2 of function CONCAT_WS must be "string or array", but "array" was found. +FAILED: SemanticException [Error 10016]: Line 2:23 Argument type mismatch '50': Argument 2 of function CONCAT_WS must be "string or array", but "array" was found. Index: ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_10_nonpart_noncompat_bucketing.q.out (working copy) @@ -48,4 +48,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table bucketing spec does not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Table bucketing spec does not match Index: ql/src/test/results/clientnegative/invalid_t_alter1.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_t_alter1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_t_alter1.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE alter_test (d STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@alter_test -FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead +FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead Index: ql/src/test/results/clientnegative/invalid_var_samp_syntax.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_var_samp_syntax.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_var_samp_syntax.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid. +FAILED: SemanticException The specified syntax for UDAF invocation is invalid. Index: ql/src/test/results/clientnegative/ctas.q.out =================================================================== --- ql/src/test/results/clientnegative/ctas.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/ctas.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: CREATE-TABLE-AS-SELECT cannot create external table +FAILED: SemanticException [Error 10070]: CREATE-TABLE-AS-SELECT cannot create external table Index: ql/src/test/results/clientnegative/invalid_cast_to_binary_5.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_to_binary_5.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_to_binary_5.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. 
+FAILED: SemanticException Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. Index: ql/src/test/results/clientnegative/semijoin1.q.out =================================================================== --- ql/src/test/results/clientnegative/semijoin1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/semijoin1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:7 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1) +FAILED: SemanticException [Error 10004]: Line 2:7 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1) Index: ql/src/test/results/clientnegative/dyn_part1.q.out =================================================================== --- ql/src/test/results/clientnegative/dyn_part1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/dyn_part1.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table dynamic_partition (key string) partitioned by (value string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dynamic_partition -FAILED: Error in semantic analysis: Non-Partition column appears in the partition specification: hr +FAILED: SemanticException [Error 10098]: Non-Partition column appears in the partition specification: hr Index: ql/src/test/results/clientnegative/udf_field_wrong_args_len.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_field_wrong_args_len.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_field_wrong_args_len.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:7 Wrong arguments '3': The function FIELD(str, str1, str2, ...) needs at least two arguments. +FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments '3': The function FIELD(str, str1, str2, ...) needs at least two arguments. Index: ql/src/test/results/clientnegative/udf_elt_wrong_args_len.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_elt_wrong_args_len.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_elt_wrong_args_len.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:7 Arguments length mismatch '3': The function ELT(N,str1,str2,str3,...) needs at least two arguments. +FAILED: SemanticException [Error 10015]: Line 1:7 Arguments length mismatch '3': The function ELT(N,str1,str2,str3,...) needs at least two arguments. Index: ql/src/test/results/clientnegative/protectmode_tbl3.q.out =================================================================== --- ql/src/test/results/clientnegative/protectmode_tbl3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/protectmode_tbl3.q.out (working copy) @@ -34,4 +34,4 @@ col string #### A masked pattern was here #### -FAILED: Error in semantic analysis: Query against an offline table or partition Table tbl_protectmode_4 +FAILED: SemanticException [Error 10113]: Query against an offline table or partition Table tbl_protectmode_4 Index: ql/src/test/results/clientnegative/split_sample_out_of_range.q.out =================================================================== --- ql/src/test/results/clientnegative/split_sample_out_of_range.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/split_sample_out_of_range.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: 3:32 Sampling percentage should be between 0 and 100. 
Error encountered near token '105' +FAILED: SemanticException 3:32 Sampling percentage should be between 0 and 100. Error encountered near token '105' Index: ql/src/test/results/clientnegative/nopart_insert.q.out =================================================================== --- ql/src/test/results/clientnegative/nopart_insert.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/nopart_insert.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE nopart_insert(a STRING, b STRING) PARTITIONED BY (ds STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@nopart_insert -FAILED: Error in semantic analysis: 3:23 Need to specify partition columns because the destination table is partitioned. Error encountered near token 'nopart_insert' +FAILED: SemanticException 3:23 Need to specify partition columns because the destination table is partitioned. Error encountered near token 'nopart_insert' Index: ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_07_nonpart_noncompat_ifof.q.out (working copy) @@ -52,4 +52,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match Index: ql/src/test/results/clientnegative/create_view_failure6.q.out =================================================================== --- ql/src/test/results/clientnegative/create_view_failure6.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_view_failure6.q.out (working copy) @@ -2,4 +2,4 @@ PREHOOK: type: DROPVIEW POSTHOOK: query: DROP VIEW xxx15 POSTHOOK: type: DROPVIEW -FAILED: Error in semantic analysis: Rightmost columns in view output do not match PARTITIONED ON clause +FAILED: SemanticException [Error 10093]: Rightmost columns in view output do not match PARTITIONED ON clause Index: ql/src/test/results/clientnegative/alter_view_failure5.q.out =================================================================== --- ql/src/test/results/clientnegative/alter_view_failure5.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/alter_view_failure5.q.out (working copy) @@ -15,4 +15,4 @@ POSTHOOK: type: CREATEVIEW POSTHOOK: Output: default@xxx6 #### A masked pattern was here #### -FAILED: Error in semantic analysis: value not found in table's partition spec: {v=val_86} +FAILED: SemanticException value not found in table's partition spec: {v=val_86} Index: ql/src/test/results/clientnegative/merge_negative_1.q.out =================================================================== --- ql/src/test/results/clientnegative/merge_negative_1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/merge_negative_1.q.out (working copy) @@ -8,4 +8,4 @@ POSTHOOK: query: CREATE INDEX src_index_merge_test ON TABLE src2(key) as 'COMPACT' WITH DEFERRED REBUILD POSTHOOK: type: CREATEINDEX POSTHOOK: Output: default@default__src2_src_index_merge_test__ -FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: can not do merge because source table src2 is indexed. 
+FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: can not do merge because source table src2 is indexed. Index: ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_21_part_managed_external.q.out (working copy) @@ -90,4 +90,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_employee -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first. +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first. Index: ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_06_nonpart_noncompat_storage.q.out (working copy) @@ -46,4 +46,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Table inputformat/outputformats do not match Index: ql/src/test/results/clientnegative/union3.q.out =================================================================== --- ql/src/test/results/clientnegative/union3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/union3.q.out (working copy) @@ -7,4 +7,4 @@ CREATE TABLE IF NOT EXISTS union3 (bar int, baz int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@union3 -FAILED: Error in semantic analysis: 2:85 Schema of both sides of union should match: field bar: appears on the left side of the UNION at column position: 0, and on the right side of the UNION at column position: 1. Column positions should match for a UNION. Error encountered near token 'union3' +FAILED: SemanticException 2:85 Schema of both sides of union should match: field bar: appears on the left side of the UNION at column position: 0, and on the right side of the UNION at column position: 1. Column positions should match for a UNION. 
Error encountered near token 'union3' Index: ql/src/test/results/clientnegative/clustern2.q.out =================================================================== --- ql/src/test/results/clientnegative/clustern2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/clustern2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Column key Found in more than One Tables/Subqueries +FAILED: SemanticException Column key Found in more than One Tables/Subqueries Index: ql/src/test/results/clientnegative/udf_case_type_wrong.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_case_type_wrong.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_case_type_wrong.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:13 Argument type mismatch '1': The expressions after WHEN should have the same type with that after CASE: "string" is expected but "int" is found +FAILED: SemanticException [Error 10016]: Line 2:13 Argument type mismatch '1': The expressions after WHEN should have the same type with that after CASE: "string" is expected but "int" is found Index: ql/src/test/results/clientnegative/fileformat_void_input.q.out =================================================================== --- ql/src/test/results/clientnegative/fileformat_void_input.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/fileformat_void_input.q.out (working copy) @@ -19,4 +19,4 @@ POSTHOOK: Output: default@dest1 POSTHOOK: Lineage: dest1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: dest1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: 3:20 Input format must implement InputFormat. Error encountered near token 'dest1' +FAILED: SemanticException 3:20 Input format must implement InputFormat. Error encountered near token 'dest1' Index: ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_16_part_noncompat_schema.q.out (working copy) @@ -51,4 +51,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Partition Schema does not match Index: ql/src/test/results/clientnegative/udf_in.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_in.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_in.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:9 Wrong arguments '3': The arguments for IN should be the same type! Types are: {int IN (array)} +FAILED: SemanticException [Error 10014]: Line 1:9 Wrong arguments '3': The arguments for IN should be the same type! 
Types are: {int IN (array)} Index: ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_01_nonpart_over_loaded.q.out (working copy) @@ -52,4 +52,4 @@ POSTHOOK: query: load data local inpath "../data/files/test.dat" into table exim_department POSTHOOK: type: LOAD POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: Table exists and contains data files +FAILED: SemanticException [Error 10119]: Table exists and contains data files Index: ql/src/test/results/clientnegative/udf_sort_array_wrong3.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_sort_array_wrong3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_sort_array_wrong3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:18 Argument type mismatch '13': Argument 1 of function SORT_ARRAY must be array, but array> was found. +FAILED: SemanticException [Error 10016]: Line 2:18 Argument type mismatch '13': Argument 1 of function SORT_ARRAY must be array, but array> was found. Index: ql/src/test/results/clientnegative/archive_insert2.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_insert2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_insert2.q.out (working copy) @@ -31,4 +31,4 @@ POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12 POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: Insert conflict with existing archive: ds=2008-04-08 +FAILED: SemanticException Insert conflict with existing archive: ds=2008-04-08 Index: ql/src/test/results/clientnegative/udtf_explode_not_supported4.q.out =================================================================== --- ql/src/test/results/clientnegative/udtf_explode_not_supported4.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udtf_explode_not_supported4.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: GROUP BY is not supported with a UDTF in the SELECT clause +FAILED: SemanticException [Error 10077]: GROUP BY is not supported with a UDTF in the SELECT clause Index: ql/src/test/results/clientnegative/archive3.q.out =================================================================== --- ql/src/test/results/clientnegative/archive3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: ARCHIVE can only be run on partitions +FAILED: SemanticException [Error 10110]: ARCHIVE can only be run on partitions Index: ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out =================================================================== --- ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/mapreduce_stack_trace.q.out (working copy) @@ -10,4 +10,4 @@ Hive Runtime Error while processing row {"key":"238","value":"val_238"} FATAL ExecMapper: org.apache.hadoop.hive.ql.metadata.HiveException: 
Hive Runtime Error while processing row {"key":"238","value":"val_238"} Hive Runtime Error while processing row {"key":"238","value":"val_238"} -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 20000 from org.apache.hadoop.hive.ql.exec.MapRedTask. Unable to initialize custom script. Index: ql/src/test/results/clientnegative/strict_pruning.q.out =================================================================== --- ql/src/test/results/clientnegative/strict_pruning.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/strict_pruning.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: No partition predicate found for Alias "srcpart" Table "srcpart" +FAILED: SemanticException [Error 10041]: No partition predicate found for Alias "srcpart" Table "srcpart" Index: ql/src/test/results/clientnegative/udf_map_keys_arg_num.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_map_keys_arg_num.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_map_keys_arg_num.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:7 Arguments length mismatch '"2"': The function MAP_KEYS only accepts one argument. +FAILED: SemanticException [Error 10015]: Line 1:7 Arguments length mismatch '"2"': The function MAP_KEYS only accepts one argument. Index: ql/src/test/results/clientnegative/create_or_replace_view8.q.out =================================================================== --- ql/src/test/results/clientnegative/create_or_replace_view8.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_or_replace_view8.q.out (working copy) @@ -13,4 +13,4 @@ POSTHOOK: type: CREATEVIEW POSTHOOK: Output: default@v1 #### A masked pattern was here #### -FAILED: Error in semantic analysis: Recursive view default.v1 detected (cycle: default.v1 -> default.v1). +FAILED: SemanticException Recursive view default.v1 detected (cycle: default.v1 -> default.v1). Index: ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_if_wrong_args_len.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 1:7 Arguments length mismatch '1': The function IF(expr1,expr2,expr3) accepts exactly 3 arguments. +FAILED: SemanticException [Error 10015]: Line 1:7 Arguments length mismatch '1': The function IF(expr1,expr2,expr3) accepts exactly 3 arguments. Index: ql/src/test/results/clientnegative/index_compact_entry_limit.q.out =================================================================== --- ql/src/test/results/clientnegative/index_compact_entry_limit.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/index_compact_entry_limit.q.out (working copy) @@ -32,7 +32,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@src #### A masked pattern was here #### -Execution failed with exit status: 2 +Execution failed with exit status: 1 Obtaining error information Task failed! 
@@ -42,4 +42,4 @@ Logs: #### A masked pattern was here #### -FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask +FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.MapRedTask Index: ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_15_part_nonpart.q.out (working copy) @@ -49,4 +49,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. Partition Schema does not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Partition Schema does not match Index: ql/src/test/results/clientnegative/archive_partspec1.q.out =================================================================== --- ql/src/test/results/clientnegative/archive_partspec1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/archive_partspec1.q.out (working copy) @@ -21,4 +21,4 @@ POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12 POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -FAILED: Error in semantic analysis: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (ds, nonexistingpart). +FAILED: SemanticException [Error 10125]: Partition columns in partition specification are not the same as that defined in the table schema. The names and orders have to be exactly the same. Partition columns in the table schema are: (ds, hr), while the partitions specified in the query are: (ds, nonexistingpart). Index: ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table tbl (a binary) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@tbl -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) +FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). 
Possible choices: _FUNC_(void) _FUNC_(boolean) _FUNC_(smallint) _FUNC_(int) _FUNC_(bigint) _FUNC_(float) _FUNC_(double) _FUNC_(string) _FUNC_(timestamp) Index: ql/src/test/results/clientnegative/bad_indextype.q.out =================================================================== --- ql/src/test/results/clientnegative/bad_indextype.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/bad_indextype.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: class name provided for index handler not found. +FAILED: SemanticException class name provided for index handler not found. Index: ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_19_external_over_existing.q.out (working copy) @@ -46,4 +46,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first. +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. External table cannot overwrite existing table. Drop existing table first. Index: ql/src/test/results/clientnegative/bad_sample_clause.q.out =================================================================== --- ql/src/test/results/clientnegative/bad_sample_clause.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/bad_sample_clause.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(key INT, value STRING, dt STRING, hr STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: Sampling expression needed for non-bucketed table srcpart +FAILED: SemanticException [Error 10060]: Sampling expression needed for non-bucketed table srcpart Index: ql/src/test/results/clientnegative/union.q.out =================================================================== --- ql/src/test/results/clientnegative/union.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/union.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: 2:45 Top level UNION is not supported currently; use a subquery for the UNION. Error encountered near token 'value' +FAILED: SemanticException 2:45 Top level UNION is not supported currently; use a subquery for the UNION. Error encountered near token 'value' Index: ql/src/test/results/clientnegative/invalid_min_syntax.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_min_syntax.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_min_syntax.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: The specified syntax for UDAF invocation is invalid. +FAILED: SemanticException The specified syntax for UDAF invocation is invalid. 
Index: ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_when_type_wrong3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 4:13 Argument type mismatch '5.3': The expression after ELSE should have the same type as those after THEN: "string" is expected but "double" is found +FAILED: SemanticException [Error 10016]: Line 4:13 Argument type mismatch '5.3': The expression after ELSE should have the same type as those after THEN: "string" is expected but "double" is found Index: ql/src/test/results/clientnegative/drop_index_failure.q.out =================================================================== --- ql/src/test/results/clientnegative/drop_index_failure.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/drop_index_failure.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Invalid index UnknownIndex +FAILED: SemanticException [Error 10003]: Invalid index UnknownIndex Index: ql/src/test/results/clientnegative/create_table_failure1.q.out =================================================================== --- ql/src/test/results/clientnegative/create_table_failure1.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/create_table_failure1.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: org.apache.hadoop.hive.ql.parse.SemanticException: Database does not exist: table_in_database_creation_not_exist +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Database does not exist: table_in_database_creation_not_exist Index: ql/src/test/results/clientnegative/clusterbydistributeby.q.out =================================================================== --- ql/src/test/results/clientnegative/clusterbydistributeby.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/clusterbydistributeby.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: 8:14 Cannot have both CLUSTER BY and DISTRIBUTE BY clauses. Error encountered near token 'tkey' +FAILED: SemanticException 8:14 Cannot have both CLUSTER BY and DISTRIBUTE BY clauses. Error encountered near token 'tkey' Index: ql/src/test/results/clientnegative/orderbysortby.q.out =================================================================== --- ql/src/test/results/clientnegative/orderbysortby.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/orderbysortby.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE dest1(key INT, ten INT, one INT, value STRING) STORED AS TEXTFILE POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@dest1 -FAILED: Error in semantic analysis: 8:8 Cannot have both ORDER BY and SORT BY clauses. Error encountered near token 'one' +FAILED: SemanticException 8:8 Cannot have both ORDER BY and SORT BY clauses. 
Error encountered near token 'one' Index: ql/src/test/results/clientnegative/udtf_not_supported3.q.out =================================================================== --- ql/src/test/results/clientnegative/udtf_not_supported3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udtf_not_supported3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: GROUP BY is not supported with a UDTF in the SELECT clause +FAILED: SemanticException [Error 10077]: GROUP BY is not supported with a UDTF in the SELECT clause Index: ql/src/test/results/clientnegative/udtf_invalid_place.q.out =================================================================== --- ql/src/test/results/clientnegative/udtf_invalid_place.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udtf_invalid_place.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: UDTF's are not supported outside the SELECT clause, nor nested in expressions +FAILED: SemanticException [Error 10081]: UDTF's are not supported outside the SELECT clause, nor nested in expressions Index: ql/src/test/results/clientnegative/udf_concat_ws_wrong3.q.out =================================================================== --- ql/src/test/results/clientnegative/udf_concat_ws_wrong3.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/udf_concat_ws_wrong3.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:17 Argument type mismatch '1234': Argument 1 of function CONCAT_WS must be "string or array", but "int" was found. +FAILED: SemanticException [Error 10016]: Line 2:17 Argument type mismatch '1234': Argument 1 of function CONCAT_WS must be "string or array", but "int" was found. Index: ql/src/test/results/clientnegative/invalid_t_alter2.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_t_alter2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_t_alter2.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: CREATE TABLE alter_test (d STRING) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@alter_test -FAILED: Error in semantic analysis: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead +FAILED: SemanticException [Error 10099]: DATE and DATETIME types aren't supported yet. Please use TIMESTAMP instead Index: ql/src/test/results/clientnegative/invalid_cast_to_binary_6.q.out =================================================================== --- ql/src/test/results/clientnegative/invalid_cast_to_binary_6.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/invalid_cast_to_binary_6.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. +FAILED: SemanticException Line 0:-1 Wrong arguments '2': Only string or binary data can be cast into binary data types. Index: ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out =================================================================== --- ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/exim_14_nonpart_part.q.out (working copy) @@ -48,4 +48,4 @@ tblproperties("creator"="krishna") POSTHOOK: type: CREATETABLE POSTHOOK: Output: importer@exim_department -FAILED: Error in semantic analysis: The existing table is not compatible with the import spec. 
Partition Schema does not match +FAILED: SemanticException [Error 10120]: The existing table is not compatible with the import spec. Partition Schema does not match Index: ql/src/test/results/clientnegative/lateral_view_join.q.out =================================================================== --- ql/src/test/results/clientnegative/lateral_view_join.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/lateral_view_join.q.out (working copy) @@ -1,2 +1,2 @@ -FAILED: Parse Error: line 1:59 mismatched input 'AS' expecting Identifier near ')' in table alias +FAILED: ParseException line 1:59 mismatched input 'AS' expecting Identifier near ')' in table alias Index: ql/src/test/results/clientnegative/semijoin2.q.out =================================================================== --- ql/src/test/results/clientnegative/semijoin2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/semijoin2.q.out (working copy) @@ -1 +1 @@ -FAILED: Error in semantic analysis: Line 2:70 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1) +FAILED: SemanticException [Error 10004]: Line 2:70 Invalid table alias or column reference 'b': (possible column names are: _col0, _col1) Index: ql/src/test/results/clientnegative/dyn_part2.q.out =================================================================== --- ql/src/test/results/clientnegative/dyn_part2.q.out (revision 1336457) +++ ql/src/test/results/clientnegative/dyn_part2.q.out (working copy) @@ -3,4 +3,4 @@ POSTHOOK: query: create table nzhang_part1 (key string, value string) partitioned by (ds string, hr string) POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@nzhang_part1 -FAILED: Error in semantic analysis: Line 3:23 Cannot insert into target table because column number/types are different 'hr': Table insclause-0 has 3 columns, but query has 2 columns. +FAILED: SemanticException [Error 10044]: Line 3:23 Cannot insert into target table because column number/types are different 'hr': Table insclause-0 has 3 columns, but query has 2 columns. 
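The source changes below consume these bracketed codes. In particular, the JobDebugger change later in this patch scans task diagnostics with a Pattern obtained from ErrorMsg.getErrorCodePattern() and keeps the last code it finds. The following self-contained sketch shows a close analogue of that extraction logic; the regex literal here is an assumption about the pattern's shape, since the patched code gets the real Pattern from ErrorMsg:

// Hypothetical sketch (not part of this patch): extracting a bracketed
// Hive error code, e.g. "FAILED: SemanticException [Error 10016]: ...".
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ErrorCodeSketch {
  // Assumed stand-in for ErrorMsg.getErrorCodePattern().
  private static final Pattern ERROR_CODE = Pattern.compile("\\[Error (\\d+)\\]");

  // Returns the last bracketed code found, or 0 if none is present.
  public static int extract(String[] diagnostics) {
    int result = 0;
    for (String mesg : diagnostics) {
      Matcher m = ERROR_CODE.matcher(mesg);
      while (m.find()) {
        result = Integer.parseInt(m.group(1));
      }
    }
    return result;
  }

  public static void main(String[] args) {
    String[] diags = {
      "FAILED: SemanticException [Error 10025]: Line 1:7 Expression not in GROUP BY key 'value'"
    };
    System.out.println(extract(diags)); // prints 10025
  }
}

Keeping the last match rather than the first mirrors the patch's stated intent of reporting the bottommost error-coded exception in a diagnostic dump.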
Index: ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
===================================================================
--- ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java (revision 1336457)
+++ ql/src/test/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java (working copy)
@@ -41,8 +41,7 @@
     driver.run("drop table testDL");
     CommandProcessorResponse resp = driver.run("create table testDL (a int) as select * from tbl2");
-    assertEquals(10, resp.getResponseCode());
-    assertTrue(resp.getErrorMessage().contains("CTAS not supported."));
+    assertEquals(40000, resp.getResponseCode());
     resp = driver.run("create table testDL (a int)");
     assertEquals(0, resp.getResponseCode());
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRFileSink1.java (working copy)
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.ConditionalTask;
@@ -50,7 +51,6 @@
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRMapJoinCtx;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (working copy)
@@ -33,6 +33,7 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -50,7 +51,6 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.Transform;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (working copy)
@@ -32,6 +32,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
@@ -53,7 +54,6 @@
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.lib.Rule;
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.GenMapRedWalker;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
Index: ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java (revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinFactory.java (working copy)
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
@@ -39,7 +40,6 @@
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMRMapJoinCtx;
 import org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx;
 import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
Index: ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/lockmgr/zookeeper/ZooKeeperHiveLockManager.java (working copy)
@@ -39,7 +39,7 @@
 import java.util.regex.Matcher;
 import org.apache.zookeeper.KeeperException;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockManagerCtx;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
Index: ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java (revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java (working copy)
@@ -25,8 +25,11 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.errors.ErrorAndSolution;
 import org.apache.hadoop.hive.ql.exec.errors.TaskLogProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
@@ -48,21 +51,34 @@
   private final Map<String, Integer> failures = new HashMap<String, Integer>();
   private final Set<String> successes = new HashSet<String>(); // Successful task ID's
   private final Map<String, TaskInfo> taskIdToInfo = new HashMap<String, TaskInfo>();
+  private int maxFailures = 0; // Used for showJobFailDebugInfo

   private static class TaskInfo {
     String jobId;
     Set<String> logUrls;
+    int errorCode; // Obtained from the HiveException thrown
+    String[] diagnosticMesgs;

     public TaskInfo(String jobId) {
       this.jobId = jobId;
       logUrls = new HashSet<String>();
HashSet(); + errorCode = 0; + diagnosticMesgs = null; } public void addLogUrl(String logUrl) { logUrls.add(logUrl); } + public void setErrorCode(int errorCode) { + this.errorCode = errorCode; + } + + public void setDiagnosticMesgs(String[] diagnosticMesgs) { + this.diagnosticMesgs = diagnosticMesgs; + } + public Set getLogUrls() { return logUrls; } @@ -70,6 +86,14 @@ public String getJobId() { return jobId; } + + public int getErrorCode() { + return errorCode; + } + + public String[] getDiagnosticMesgs() { + return diagnosticMesgs; + } } public JobDebugger(JobConf conf, RunningJob rj, LogHelper console) { @@ -98,17 +122,31 @@ return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&start=-8193"; } - class TaskLogGrabber implements Runnable { + public static int extractErrorCode(String[] diagnostics) { + int result = 0; + Pattern errorCodeRegex = ErrorMsg.getErrorCodePattern(); + for (String mesg : diagnostics) { + Matcher matcher = errorCodeRegex.matcher(mesg); + if (matcher.find()) { + result = Integer.parseInt(matcher.group(1)); + // We don't exit the loop early because we want to extract the error code + // corresponding to the bottommost error-coded exception. + } + } + return result; + } + class TaskInfoGrabber implements Runnable { + public void run() { try { - getTaskLogs(); + getTaskInfos(); } catch (IOException e) { console.printError(e.getMessage()); } } - private void getTaskLogs() throws IOException { + private void getTaskInfos() throws IOException { int startIndex = 0; while (true) { TaskCompletionEvent[] taskCompletions = rj.getTaskCompletionEvents(startIndex); @@ -148,11 +186,16 @@ assert (ti.getJobId() != null && ti.getJobId().equals(jobId)); ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), t.getTaskId())); - // If a task failed, then keep track of the total number of failures - // for that task (typically, a task gets re-run up to 4 times if it - // fails + // If a task failed, fetch its error code (if available). + // Also keep track of the total number of failures for that + // task (typically, a task gets re-run up to 4 times if it fails).
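The extractErrorCode() helper above settles which code to report when a failed task attempt carries several error-coded messages in its diagnostics: every message is scanned, and a later match overwrites an earlier one, so the bottommost error-coded HiveException wins. A minimal standalone sketch of that scan, using the same regular expression that ErrorMsg.getErrorCodePattern() returns in this patch; the sample diagnostic strings are invented for illustration:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ExtractErrorCodeDemo {
      // The pattern that ErrorMsg.getErrorCodePattern() exposes (see the ErrorMsg diff below).
      private static final Pattern ERROR_CODE_PATTERN =
          Pattern.compile("HiveException:\\s+\\[Error ([0-9]+)\\]: (.*)");

      public static void main(String[] args) {
        // Invented diagnostics for one task attempt: an outer and a nested coded exception.
        String[] diagnostics = {
            "org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20002]: Hive encountered some unknown error while running your custom script.",
            "Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: [Error 20001]: An error occurred while reading or writing to your custom script."
        };
        int result = 0;
        for (String mesg : diagnostics) {
          Matcher matcher = ERROR_CODE_PATTERN.matcher(mesg);
          if (matcher.find()) {
            result = Integer.parseInt(matcher.group(1)); // no early exit: the last match wins
          }
        }
        System.out.println(result); // prints 20001, the bottommost code
      }
    }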
+ if (t.getTaskStatus() != TaskCompletionEvent.Status.SUCCEEDED) { + if (ti.getErrorCode() == 0) { + String[] diags = rj.getTaskDiagnostics(t.getTaskAttemptId()); + ti.setErrorCode(extractErrorCode(diags)); + ti.setDiagnosticMesgs(diags); + } - if (t.getTaskStatus() != TaskCompletionEvent.Status.SUCCEEDED) { Integer failAttempts = failures.get(taskId); if (failAttempts == null) { failAttempts = Integer.valueOf(0); @@ -171,14 +214,21 @@ } } + private void computeMaxFailures() { + maxFailures = 0; + for (Integer failCount : failures.values()) { + if (maxFailures < failCount.intValue()) { + maxFailures = failCount.intValue(); + } + } + } + @SuppressWarnings("deprecation") private void showJobFailDebugInfo() throws IOException { - - console.printError("Error during job, obtaining debugging information..."); // Loop to get all task completion events because getTaskCompletionEvents // only returns a subset per call - TaskLogGrabber tlg = new TaskLogGrabber(); + TaskInfoGrabber tlg = new TaskInfoGrabber(); Thread t = new Thread(tlg); try { t.start(); @@ -196,23 +246,24 @@ if (failures.keySet().size() == 0) { return; } - // Find the highest failure count - int maxFailures = 0; - for (Integer failCount : failures.values()) { - if (maxFailures < failCount.intValue()) { - maxFailures = failCount.intValue(); - } - } + computeMaxFailures(); // Display Error Message for tasks with the highest failure count - String jtUrl = JobTrackerURLResolver.getURL(conf); + String jtUrl = null; + try { + jtUrl = JobTrackerURLResolver.getURL(conf); + } catch (Exception e) { + console.printError("Unable to retrieve URL for Hadoop Task logs. " + + e.getMessage()); + } for (String task : failures.keySet()) { if (failures.get(task).intValue() == maxFailures) { TaskInfo ti = taskIdToInfo.get(task); String jobId = ti.getJobId(); - String taskUrl = jtUrl + "/taskdetails.jsp?jobid=" + jobId + "&tipid=" + task.toString(); + String taskUrl = (jtUrl == null) ? "Unavailable" : + jtUrl + "/taskdetails.jsp?jobid=" + jobId + "&tipid=" + task.toString(); TaskLogProcessor tlp = new TaskLogProcessor(conf); for (String logUrl : ti.getLogUrls()) { @@ -248,6 +299,11 @@ } sb.append("-----\n"); + sb.append("Diagnostic Messages for this Task:\n"); + String[] diagMesgs = ti.getDiagnosticMesgs(); + for (String mesg : diagMesgs) { + sb.append(mesg + "\n"); + } console.printError(sb.toString()); } @@ -256,6 +312,16 @@ } } return; + } + public int getErrorCode() { + for (String task : failures.keySet()) { + if (failures.get(task).intValue() == maxFailures) { + TaskInfo ti = taskIdToInfo.get(task); + return ti.getErrorCode(); + } + } + // Should never reach here unless there were no failed tasks.
+ return 0; } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (working copy) @@ -712,6 +712,10 @@ Thread t = new Thread(jd); t.start(); t.join(HiveConf.getIntVar(job, HiveConf.ConfVars.JOB_DEBUG_TIMEOUT)); + int ec = jd.getErrorCode(); + if (ec > 0) { + returnVal = ec; + } } catch (InterruptedException e) { console.printError("Timed out trying to grab more detailed job failure" + " information, please check jobtracker for more info"); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java (working copy) @@ -694,7 +694,7 @@ } if (ret != 0) { - System.exit(2); + System.exit(ret); } } Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy) @@ -92,6 +92,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; import org.apache.hadoop.hive.ql.io.ContentSummaryInputFormat; @@ -105,7 +106,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.parse.ErrorMsg; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (working copy) @@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.ScriptDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; @@ -214,7 +215,7 @@ // initialize all children before starting the script initializeChildren(hconf); } catch (Exception e) { - throw new HiveException("Cannot initialize ScriptOperator", e); + throw new HiveException(ErrorMsg.SCRIPT_INIT_ERROR.getErrorCodedMsg(), e); } } @@ -317,12 +318,12 @@ outThread.start(); errThread.start(); } catch (Exception e) { - throw new HiveException("Cannot initialize ScriptOperator", e); + throw new HiveException(ErrorMsg.SCRIPT_INIT_ERROR.getErrorCodedMsg(), e); } } if (scriptError != null) { - throw new HiveException(scriptError); + throw new HiveException(ErrorMsg.SCRIPT_GENERIC_ERROR.getErrorCodedMsg(), scriptError); } try { @@ -345,7 +346,7 @@ displayBrokenPipeInfo(); } scriptError = e; - throw new HiveException(e); + throw new 
HiveException(ErrorMsg.SCRIPT_IO_ERROR.getErrorCodedMsg(), e); } } } @@ -356,7 +357,7 @@ boolean new_abort = abort; if (!abort) { if (scriptError != null) { - throw new HiveException(scriptError); + throw new HiveException(ErrorMsg.SCRIPT_GENERIC_ERROR.getErrorCodedMsg(), scriptError); } // everything ok. try normal shutdown try { @@ -449,7 +450,7 @@ super.close(new_abort); if (new_abort && !abort) { - throw new HiveException("Hit error while closing .."); + throw new HiveException(ErrorMsg.SCRIPT_CLOSING_ERROR.getErrorCodedMsg()); } } Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java (working copy) @@ -26,10 +26,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.Utilities; -import org.apache.hadoop.hive.ql.parse.ErrorMsg; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (working copy) @@ -33,6 +33,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (working copy) @@ -33,6 +33,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (working copy) @@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc; Index: 
ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java (working copy) @@ -31,6 +31,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (working copy) @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.Task; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.exec.Utilities; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (working copy) @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator; import org.apache.hadoop.hive.ql.exec.ArchiveUtils; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (working copy) @@ -1,375 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.ql.parse; - -import java.util.HashMap; -import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.antlr.runtime.tree.Tree; -import org.apache.hadoop.hive.ql.metadata.HiveUtils; - -/** - * List of error messages thrown by the parser. 
- **/ - -public enum ErrorMsg { - // SQLStates are taken from Section 12.5 of ISO-9075. - // See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt - // Most will just rollup to the generic syntax error state of 42000, but - // specific errors can override the that state. - // See this page for how MySQL uses SQLState codes: - // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html - - GENERIC_ERROR("Exception while processing"), - INVALID_TABLE("Table not found", "42S02"), - INVALID_COLUMN("Invalid column reference"), - INVALID_INDEX("Invalid index"), - INVALID_TABLE_OR_COLUMN("Invalid table alias or column reference"), - AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous table alias or column reference"), - INVALID_PARTITION("Partition not found"), - AMBIGUOUS_COLUMN("Ambiguous column reference"), - AMBIGUOUS_TABLE_ALIAS("Ambiguous table alias"), - INVALID_TABLE_ALIAS("Invalid table alias"), - NO_TABLE_ALIAS("No table alias"), - INVALID_FUNCTION("Invalid function"), - INVALID_FUNCTION_SIGNATURE("Function argument type mismatch"), - INVALID_OPERATOR_SIGNATURE("Operator argument type mismatch"), - INVALID_ARGUMENT("Wrong arguments"), - INVALID_ARGUMENT_LENGTH("Arguments length mismatch", "21000"), - INVALID_ARGUMENT_TYPE("Argument type mismatch"), - INVALID_JOIN_CONDITION_1("Both left and right aliases encountered in JOIN"), - INVALID_JOIN_CONDITION_2("Neither left nor right aliases encountered in JOIN"), - INVALID_JOIN_CONDITION_3("OR not supported in JOIN currently"), - INVALID_TRANSFORM("TRANSFORM with other SELECT columns not supported"), - DUPLICATE_GROUPBY_KEY("Repeated key in GROUP BY"), - UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on different columns not supported with skew in data"), - NO_SUBQUERY_ALIAS("No alias for subquery"), - NO_INSERT_INSUBQUERY("Cannot insert in a subquery. Inserting to table "), - NON_KEY_EXPR_IN_GROUPBY("Expression not in GROUP BY key"), - INVALID_XPATH("General . and [] operators are not supported"), - INVALID_PATH("Invalid path"), ILLEGAL_PATH("Path is not legal"), - INVALID_NUMERICAL_CONSTANT("Invalid numerical constant"), - INVALID_ARRAYINDEX_CONSTANT("Non-constant expressions for array indexes not supported"), - INVALID_MAPINDEX_CONSTANT("Non-constant expression for map indexes not supported"), - INVALID_MAPINDEX_TYPE("MAP key type does not match index expression type"), - NON_COLLECTION_TYPE("[] not valid on non-collection types"), - SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"), - COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"), - DUPLICATE_COLUMN_NAMES("Duplicate column name:"), - INVALID_BUCKET_NUMBER("Bucket number should be bigger than zero"), - COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in CLUSTER BY and SORT BY"), - SAMPLE_RESTRICTION("Cannot SAMPLE on more than two columns"), - SAMPLE_COLUMN_NOT_FOUND("SAMPLE column not found"), - NO_PARTITION_PREDICATE("No partition predicate found"), - INVALID_DOT(". 
Operator is only supported on struct or list of struct types"), - INVALID_TBL_DDL_SERDE("Either list of columns or a custom serializer should be specified"), - TARGET_TABLE_COLUMN_MISMATCH( - "Cannot insert into target table because column number/types are different"), - TABLE_ALIAS_NOT_ALLOWED("Table alias not allowed in sampling clause"), - CLUSTERBY_DISTRIBUTEBY_CONFLICT("Cannot have both CLUSTER BY and DISTRIBUTE BY clauses"), - ORDERBY_DISTRIBUTEBY_CONFLICT("Cannot have both ORDER BY and DISTRIBUTE BY clauses"), - CLUSTERBY_SORTBY_CONFLICT("Cannot have both CLUSTER BY and SORT BY clauses"), - ORDERBY_SORTBY_CONFLICT("Cannot have both ORDER BY and SORT BY clauses"), - CLUSTERBY_ORDERBY_CONFLICT("Cannot have both CLUSTER BY and ORDER BY clauses"), - NO_LIMIT_WITH_ORDERBY("In strict mode, if ORDER BY is specified, LIMIT must also be specified"), - NO_CARTESIAN_PRODUCT("In strict mode, cartesian product is not allowed. " - + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"), - UNION_NOTIN_SUBQ("Top level UNION is not supported currently; use a subquery for the UNION"), - INVALID_INPUT_FORMAT_TYPE("Input format must implement InputFormat"), - INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement HiveOutputFormat, " - + "otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"), - NO_VALID_PARTN("The query does not reference any valid partition. " - + "To run this query, set hive.mapred.mode=nonstrict"), - NO_OUTER_MAPJOIN("MAPJOIN cannot be performed with OUTER JOIN"), - INVALID_MAPJOIN_HINT("Neither table specified as map-table"), - INVALID_MAPJOIN_TABLE("Result of a union cannot be a map table"), - NON_BUCKETED_TABLE("Sampling expression needed for non-bucketed table"), - BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numberator should not be bigger than " - + "denaminator in sample clause for table"), - NEED_PARTITION_ERROR("Need to specify partition columns because the destination " - + "table is partitioned"), - CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in " - + "the same command"), - LINES_TERMINATED_BY_NON_NEWLINE("LINES TERMINATED BY only supports newline '\\n' right now"), - CTAS_COLLST_COEXISTENCE("CREATE TABLE AS SELECT command cannot specify the list of columns " - + "for the target table"), - CTLT_COLLST_COEXISTENCE("CREATE TABLE LIKE command cannot specify the list of columns for " - + "the target table"), - INVALID_SELECT_SCHEMA("Cannot derive schema from the select-clause"), - CTAS_PARCOL_COEXISTENCE("CREATE-TABLE-AS-SELECT does not support partitioning in the target " - + "table"), - CTAS_MULTI_LOADFILE("CREATE-TABLE-AS-SELECT results in multiple file load"), - CTAS_EXTTBL_COEXISTENCE("CREATE-TABLE-AS-SELECT cannot create external table"), - INSERT_EXTERNAL_TABLE("Inserting into a external table is not allowed"), - DATABASE_NOT_EXISTS("Database does not exist:"), - TABLE_ALREADY_EXISTS("Table already exists:", "42S02"), - COLUMN_ALIAS_ALREADY_EXISTS("Column alias already exists:", "42S02"), - UDTF_MULTIPLE_EXPR("Only a single expression in the SELECT clause is supported with UDTF's"), - UDTF_REQUIRE_AS("UDTF's require an AS clause"), - UDTF_NO_GROUP_BY("GROUP BY is not supported with a UDTF in the SELECT clause"), - UDTF_NO_SORT_BY("SORT BY is not supported with a UDTF in the SELECT clause"), - UDTF_NO_CLUSTER_BY("CLUSTER BY is not supported with a UDTF in the SELECT clause"), - UDTF_NO_DISTRIBUTE_BY("DISTRUBTE BY is not supported with a UDTF in the SELECT clause"), - 
UDTF_INVALID_LOCATION("UDTF's are not supported outside the SELECT clause, nor nested " - + "in expressions"), - UDAF_INVALID_LOCATION("Not yet supported place for UDAF"), - UDTF_LATERAL_VIEW("UDTF's cannot be in a select expression when there is a lateral view"), - UDTF_ALIAS_MISMATCH("The number of aliases supplied in the AS clause does not match the " - + "number of columns output by the UDTF"), - UDF_STATEFUL_INVALID_LOCATION("Stateful UDF's can only be invoked in the SELECT list"), - LATERAL_VIEW_WITH_JOIN("JOIN with a LATERAL VIEW is not supported"), - LATERAL_VIEW_INVALID_CHILD("LATERAL VIEW AST with invalid child"), - OUTPUT_SPECIFIED_MULTIPLE_TIMES("The same output cannot be present multiple times: "), - INVALID_AS("AS clause has an invalid number of aliases"), - VIEW_COL_MISMATCH("The number of columns produced by the SELECT clause does not match the " - + "number of column names specified by CREATE VIEW"), - DML_AGAINST_VIEW("A view cannot be used as target table for LOAD or INSERT"), - ANALYZE_VIEW("ANALYZE is not supported for views"), - VIEW_PARTITION_TOTAL("At least one non-partitioning column must be present in view"), - VIEW_PARTITION_MISMATCH("Rightmost columns in view output do not match PARTITIONED ON clause"), - PARTITION_DYN_STA_ORDER("Dynamic partition cannot be the parent of a static partition"), - DYNAMIC_PARTITION_DISABLED("Dynamic partition is disabled. Either enable it by setting " - + "hive.exec.dynamic.partition=true or specify partition column values"), - DYNAMIC_PARTITION_STRICT_MODE("Dynamic partition strict mode requires at least one " - + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"), - DYNAMIC_PARTITION_MERGE("Dynamic partition does not support merging using non-CombineHiveInputFormat" - + "Please check your hive.input.format setting and make sure your Hadoop version support " - + "CombineFileInputFormat"), - NONEXISTPARTCOL("Non-Partition column appears in the partition specification: "), - UNSUPPORTED_TYPE("DATE and DATETIME types aren't supported yet. Please use " - + "TIMESTAMP instead"), - CREATE_NON_NATIVE_AS("CREATE TABLE AS SELECT cannot be used for a non-native table"), - LOAD_INTO_NON_NATIVE("A non-native table cannot be used as target for LOAD"), - LOCKMGR_NOT_SPECIFIED("Lock manager not specified correctly, set hive.lock.manager"), - LOCKMGR_NOT_INITIALIZED("Lock manager could not be initialized, check hive.lock.manager "), - LOCK_CANNOT_BE_ACQUIRED("Locks on the underlying objects cannot be acquired. retry after some time"), - ZOOKEEPER_CLIENT_COULD_NOT_BE_INITIALIZED("Check hive.zookeeper.quorum and hive.zookeeper.client.port"), - OVERWRITE_ARCHIVED_PART("Cannot overwrite an archived partition. " + - "Unarchive before running this command"), - ARCHIVE_METHODS_DISABLED("Archiving methods are currently disabled. " + - "Please see the Hive wiki for more information about enabling archiving"), - ARCHIVE_ON_MULI_PARTS("ARCHIVE can only be run on a single partition"), - UNARCHIVE_ON_MULI_PARTS("ARCHIVE can only be run on a single partition"), - ARCHIVE_ON_TABLE("ARCHIVE can only be run on partitions"), - RESERVED_PART_VAL("Partition value contains a reserved substring"), - HOLD_DDLTIME_ON_NONEXIST_PARTITIONS("HOLD_DDLTIME hint cannot be applied to dynamic " + - "partitions or non-existent partitions"), - OFFLINE_TABLE_OR_PARTITION("Query against an offline table or partition"), - OUTERJOIN_USES_FILTERS("The query results could be wrong. 
" + - "Turn on hive.outerjoin.supports.filters"), - NEED_PARTITION_SPECIFICATION("Table is partitioned and partition specification is needed"), - INVALID_METADATA("The metadata file could not be parsed "), - NEED_TABLE_SPECIFICATION("Table name could be determined; It should be specified "), - PARTITION_EXISTS("Partition already exists"), - TABLE_DATA_EXISTS("Table exists and contains data files"), - INCOMPATIBLE_SCHEMA("The existing table is not compatible with the import spec. "), - EXIM_FOR_NON_NATIVE("Export/Import cannot be done for a non-native table. "), - INSERT_INTO_BUCKETIZED_TABLE("Bucketized tables do not support INSERT INTO:"), - NO_COMPARE_BIGINT_STRING("In strict mode, comparing bigints and strings is not allowed, " - + "it may result in a loss of precision. " - + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"), - NO_COMPARE_BIGINT_DOUBLE("In strict mode, comparing bigints and doubles is not allowed, " - + "it may result in a loss of precision. " - + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"), - PARTSPEC_DIFFER_FROM_SCHEMA("Partition columns in partition specification are not the same as " - + "that defined in the table schema. The names and orders have to be exactly the same."), - PARTITION_COLUMN_NON_PRIMITIVE("Partition column must be of primitive type."), - INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS( - "Dynamic partitions do not support IF NOT EXISTS. Specified partitions with value :"), - ; - - private String mesg; - private String sqlState; - - private static final char SPACE = ' '; - private static final Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*line [0-9]+:[0-9]+ (.*)"); - private static Map mesgToErrorMsgMap = new HashMap(); - private static int minMesgLength = -1; - - static { - for (ErrorMsg errorMsg : values()) { - mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg); - - int length = errorMsg.getMsg().trim().length(); - if (minMesgLength == -1 || length < minMesgLength) { - minMesgLength = length; - } - } - } - - /** - * For a given error message string, searches for a ErrorMsg enum - * that appears to be a match. If an match is found, returns the - * SQLState associated with the ErrorMsg. If a match - * is not found or ErrorMsg has no SQLState, returns - * the SQLState bound to the GENERIC_ERROR - * ErrorMsg. 
- * - * @param mesg - * An error message string - * @return SQLState - */ - public static String findSQLState(String mesg) { - - if (mesg == null) { - return GENERIC_ERROR.getSQLState(); - } - - // first see if there is a direct match - ErrorMsg errorMsg = mesgToErrorMsgMap.get(mesg); - if (errorMsg != null) { - if (errorMsg.getSQLState() != null) { - return errorMsg.getSQLState(); - } else { - return GENERIC_ERROR.getSQLState(); - } - } - - // if not see if the mesg follows type of format, which is typically the - // case: - // line 1:14 Table not found table_name - String truncatedMesg = mesg.trim(); - Matcher match = ERROR_MESSAGE_PATTERN.matcher(mesg); - if (match.matches()) { - truncatedMesg = match.group(1); - } - - // appends might exist after the root message, so strip tokens off until we - // match - while (truncatedMesg.length() > minMesgLength) { - errorMsg = mesgToErrorMsgMap.get(truncatedMesg.trim()); - if (errorMsg != null) { - if (errorMsg.getSQLState() != null) { - return errorMsg.getSQLState(); - } else { - return GENERIC_ERROR.getSQLState(); - } - } - - int lastSpace = truncatedMesg.lastIndexOf(SPACE); - if (lastSpace == -1) { - break; - } - - // hack off the last word and try again - truncatedMesg = truncatedMesg.substring(0, lastSpace).trim(); - } - - return GENERIC_ERROR.getSQLState(); - } - - ErrorMsg(String mesg) { - // 42000 is the generic SQLState for syntax error. - this(mesg, "42000"); - } - - ErrorMsg(String mesg, String sqlState) { - this.mesg = mesg; - this.sqlState = sqlState; - } - - private static int getLine(ASTNode tree) { - if (tree.getChildCount() == 0) { - return tree.getToken().getLine(); - } - - return getLine((ASTNode) tree.getChild(0)); - } - - private static int getCharPositionInLine(ASTNode tree) { - if (tree.getChildCount() == 0) { - return tree.getToken().getCharPositionInLine(); - } - - return getCharPositionInLine((ASTNode) tree.getChild(0)); - } - - // Dirty hack as this will throw away spaces and other things - find a better - // way! 
- public static String getText(ASTNode tree) { - if (tree.getChildCount() == 0) { - return tree.getText(); - } - return getText((ASTNode) tree.getChild(tree.getChildCount() - 1)); - } - - public String getMsg(ASTNode tree) { - StringBuilder sb = new StringBuilder(); - renderPosition(sb, tree); - sb.append(" "); - sb.append(mesg); - sb.append(" '"); - sb.append(getText(tree)); - sb.append("'"); - renderOrigin(sb, tree.getOrigin()); - return sb.toString(); - } - - public static void renderOrigin(StringBuilder sb, ASTNodeOrigin origin) { - while (origin != null) { - sb.append(" in definition of "); - sb.append(origin.getObjectType()); - sb.append(" "); - sb.append(origin.getObjectName()); - sb.append(" ["); - sb.append(HiveUtils.LINE_SEP); - sb.append(origin.getObjectDefinition()); - sb.append(HiveUtils.LINE_SEP); - sb.append("] used as "); - sb.append(origin.getUsageAlias()); - sb.append(" at "); - ASTNode usageNode = origin.getUsageNode(); - renderPosition(sb, usageNode); - origin = usageNode.getOrigin(); - } - } - - private static void renderPosition(StringBuilder sb, ASTNode tree) { - sb.append("Line "); - sb.append(getLine(tree)); - sb.append(":"); - sb.append(getCharPositionInLine(tree)); - } - - public String getMsg(Tree tree) { - return getMsg((ASTNode) tree); - } - - public String getMsg(ASTNode tree, String reason) { - return getMsg(tree) + ": " + reason; - } - - public String getMsg(Tree tree, String reason) { - return getMsg((ASTNode) tree, reason); - } - - public String getMsg(String reason) { - return mesg + " " + reason; - } - - public String getMsg() { - return mesg; - } - - public String getSQLState() { - return sqlState; - } -} Index: ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java (working copy) @@ -42,6 +42,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.thrift.TDeserializer; import org.apache.thrift.TException; import org.apache.thrift.TSerializer; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (working copy) @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.Context; +import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Task; Index: ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (revision 1336457) +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (working copy) @@ -50,6 +50,7 @@ import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.ErrorMsg; import 
org.apache.hadoop.hive.ql.exec.ArchiveUtils; import org.apache.hadoop.hive.ql.exec.FetchTask; import org.apache.hadoop.hive.ql.exec.Task; Index: ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (working copy) +++ ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (working copy) @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.parse; +package org.apache.hadoop.hive.ql; import java.util.HashMap; import java.util.Map; @@ -25,182 +25,212 @@ import org.antlr.runtime.tree.Tree; import org.apache.hadoop.hive.ql.metadata.HiveUtils; +import org.apache.hadoop.hive.ql.parse.ASTNode; +import org.apache.hadoop.hive.ql.parse.ASTNodeOrigin; /** * List of error messages thrown by the parser. **/ public enum ErrorMsg { + // The error codes are Hive-specific and partitioned into the following ranges: + // 10000 to 19999: Errors occurring during semantic analysis and compilation of the query. + // 20000 to 29999: Runtime errors where Hive believes that retries are unlikely to succeed. + // 30000 to 39999: Runtime errors which Hive thinks may be transient and retrying may succeed. + // 40000 to 49999: Errors where Hive is unable to advise about retries. + // In addition to the error code, ErrorMsg also has a SQLState field. // SQLStates are taken from Section 12.5 of ISO-9075. // See http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt // Most will just rollup to the generic syntax error state of 42000, but // specific errors can override the that state. // See this page for how MySQL uses SQLState codes: // http://dev.mysql.com/doc/refman/5.0/en/connector-j-reference-error-sqlstates.html + GENERIC_ERROR(40000, "Exception while processing"), - GENERIC_ERROR("Exception while processing"), - INVALID_TABLE("Table not found", "42S02"), - INVALID_COLUMN("Invalid column reference"), - INVALID_INDEX("Invalid index"), - INVALID_TABLE_OR_COLUMN("Invalid table alias or column reference"), - AMBIGUOUS_TABLE_OR_COLUMN("Ambiguous table alias or column reference"), - INVALID_PARTITION("Partition not found"), - AMBIGUOUS_COLUMN("Ambiguous column reference"), - AMBIGUOUS_TABLE_ALIAS("Ambiguous table alias"), - INVALID_TABLE_ALIAS("Invalid table alias"), - NO_TABLE_ALIAS("No table alias"), - INVALID_FUNCTION("Invalid function"), - INVALID_FUNCTION_SIGNATURE("Function argument type mismatch"), - INVALID_OPERATOR_SIGNATURE("Operator argument type mismatch"), - INVALID_ARGUMENT("Wrong arguments"), - INVALID_ARGUMENT_LENGTH("Arguments length mismatch", "21000"), - INVALID_ARGUMENT_TYPE("Argument type mismatch"), - INVALID_JOIN_CONDITION_1("Both left and right aliases encountered in JOIN"), - INVALID_JOIN_CONDITION_2("Neither left nor right aliases encountered in JOIN"), - INVALID_JOIN_CONDITION_3("OR not supported in JOIN currently"), - INVALID_TRANSFORM("TRANSFORM with other SELECT columns not supported"), - DUPLICATE_GROUPBY_KEY("Repeated key in GROUP BY"), - UNSUPPORTED_MULTIPLE_DISTINCTS("DISTINCT on different columns not supported with skew in data"), - NO_SUBQUERY_ALIAS("No alias for subquery"), - NO_INSERT_INSUBQUERY("Cannot insert in a subquery. Inserting to table "), - NON_KEY_EXPR_IN_GROUPBY("Expression not in GROUP BY key"), - INVALID_XPATH("General .
and [] operators are not supported"), - INVALID_PATH("Invalid path"), ILLEGAL_PATH("Path is not legal"), - INVALID_NUMERICAL_CONSTANT("Invalid numerical constant"), - INVALID_ARRAYINDEX_CONSTANT("Non-constant expressions for array indexes not supported"), - INVALID_MAPINDEX_CONSTANT("Non-constant expression for map indexes not supported"), - INVALID_MAPINDEX_TYPE("MAP key type does not match index expression type"), - NON_COLLECTION_TYPE("[] not valid on non-collection types"), - SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"), - COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"), - DUPLICATE_COLUMN_NAMES("Duplicate column name:"), - INVALID_BUCKET_NUMBER("Bucket number should be bigger than zero"), - COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in CLUSTER BY and SORT BY"), - SAMPLE_RESTRICTION("Cannot SAMPLE on more than two columns"), - SAMPLE_COLUMN_NOT_FOUND("SAMPLE column not found"), - NO_PARTITION_PREDICATE("No partition predicate found"), - INVALID_DOT(". Operator is only supported on struct or list of struct types"), - INVALID_TBL_DDL_SERDE("Either list of columns or a custom serializer should be specified"), - TARGET_TABLE_COLUMN_MISMATCH( + INVALID_TABLE(10001, "Table not found", "42S02"), + INVALID_COLUMN(10002, "Invalid column reference"), + INVALID_INDEX(10003, "Invalid index"), + INVALID_TABLE_OR_COLUMN(10004, "Invalid table alias or column reference"), + AMBIGUOUS_TABLE_OR_COLUMN(10005, "Ambiguous table alias or column reference"), + INVALID_PARTITION(10006, "Partition not found"), + AMBIGUOUS_COLUMN(10007, "Ambiguous column reference"), + AMBIGUOUS_TABLE_ALIAS(10008, "Ambiguous table alias"), + INVALID_TABLE_ALIAS(10009, "Invalid table alias"), + NO_TABLE_ALIAS(10010, "No table alias"), + INVALID_FUNCTION(10011, "Invalid function"), + INVALID_FUNCTION_SIGNATURE(10012, "Function argument type mismatch"), + INVALID_OPERATOR_SIGNATURE(10013, "Operator argument type mismatch"), + INVALID_ARGUMENT(10014, "Wrong arguments"), + INVALID_ARGUMENT_LENGTH(10015, "Arguments length mismatch", "21000"), + INVALID_ARGUMENT_TYPE(10016, "Argument type mismatch"), + INVALID_JOIN_CONDITION_1(10017, "Both left and right aliases encountered in JOIN"), + INVALID_JOIN_CONDITION_2(10018, "Neither left nor right aliases encountered in JOIN"), + INVALID_JOIN_CONDITION_3(10019, "OR not supported in JOIN currently"), + INVALID_TRANSFORM(10020, "TRANSFORM with other SELECT columns not supported"), + DUPLICATE_GROUPBY_KEY(10021, "Repeated key in GROUP BY"), + UNSUPPORTED_MULTIPLE_DISTINCTS(10022, "DISTINCT on different columns not supported" + + " with skew in data"), + NO_SUBQUERY_ALIAS(10023, "No alias for subquery"), + NO_INSERT_INSUBQUERY(10024, "Cannot insert in a subquery. Inserting to table "), + NON_KEY_EXPR_IN_GROUPBY(10025, "Expression not in GROUP BY key"), + INVALID_XPATH(10026, "General . 
and [] operators are not supported"), + INVALID_PATH(10027, "Invalid path"), + ILLEGAL_PATH(10028, "Path is not legal"), + INVALID_NUMERICAL_CONSTANT(10029, "Invalid numerical constant"), + INVALID_ARRAYINDEX_CONSTANT(10030, "Non-constant expressions for array indexes not supported"), + INVALID_MAPINDEX_CONSTANT(10031, "Non-constant expression for map indexes not supported"), + INVALID_MAPINDEX_TYPE(10032, "MAP key type does not match index expression type"), + NON_COLLECTION_TYPE(10033, "[] not valid on non-collection types"), + SELECT_DISTINCT_WITH_GROUPBY(10034, "SELECT DISTINCT and GROUP BY can not be in the same query"), + COLUMN_REPEATED_IN_PARTITIONING_COLS(10035, "Column repeated in partitioning columns"), + DUPLICATE_COLUMN_NAMES(10036, "Duplicate column name:"), + INVALID_BUCKET_NUMBER(10037, "Bucket number should be bigger than zero"), + COLUMN_REPEATED_IN_CLUSTER_SORT(10038, "Same column cannot appear in CLUSTER BY and SORT BY"), + SAMPLE_RESTRICTION(10039, "Cannot SAMPLE on more than two columns"), + SAMPLE_COLUMN_NOT_FOUND(10040, "SAMPLE column not found"), + NO_PARTITION_PREDICATE(10041, "No partition predicate found"), + INVALID_DOT(10042, ". Operator is only supported on struct or list of struct types"), + INVALID_TBL_DDL_SERDE(10043, "Either list of columns or a custom serializer should be specified"), + TARGET_TABLE_COLUMN_MISMATCH(10044, "Cannot insert into target table because column number/types are different"), - TABLE_ALIAS_NOT_ALLOWED("Table alias not allowed in sampling clause"), - CLUSTERBY_DISTRIBUTEBY_CONFLICT("Cannot have both CLUSTER BY and DISTRIBUTE BY clauses"), - ORDERBY_DISTRIBUTEBY_CONFLICT("Cannot have both ORDER BY and DISTRIBUTE BY clauses"), - CLUSTERBY_SORTBY_CONFLICT("Cannot have both CLUSTER BY and SORT BY clauses"), - ORDERBY_SORTBY_CONFLICT("Cannot have both ORDER BY and SORT BY clauses"), - CLUSTERBY_ORDERBY_CONFLICT("Cannot have both CLUSTER BY and ORDER BY clauses"), - NO_LIMIT_WITH_ORDERBY("In strict mode, if ORDER BY is specified, LIMIT must also be specified"), - NO_CARTESIAN_PRODUCT("In strict mode, cartesian product is not allowed. " + TABLE_ALIAS_NOT_ALLOWED(10045, "Table alias not allowed in sampling clause"), + CLUSTERBY_DISTRIBUTEBY_CONFLICT(10046, "Cannot have both CLUSTER BY and DISTRIBUTE BY clauses"), + ORDERBY_DISTRIBUTEBY_CONFLICT(10047, "Cannot have both ORDER BY and DISTRIBUTE BY clauses"), + CLUSTERBY_SORTBY_CONFLICT(10048, "Cannot have both CLUSTER BY and SORT BY clauses"), + ORDERBY_SORTBY_CONFLICT(10049, "Cannot have both ORDER BY and SORT BY clauses"), + CLUSTERBY_ORDERBY_CONFLICT(10050, "Cannot have both CLUSTER BY and ORDER BY clauses"), + NO_LIMIT_WITH_ORDERBY(10051, "In strict mode, if ORDER BY is specified, " + + "LIMIT must also be specified"), + NO_CARTESIAN_PRODUCT(10052, "In strict mode, cartesian product is not allowed. 
" + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"), - UNION_NOTIN_SUBQ("Top level UNION is not supported currently; use a subquery for the UNION"), - INVALID_INPUT_FORMAT_TYPE("Input format must implement InputFormat"), - INVALID_OUTPUT_FORMAT_TYPE("Output Format must implement HiveOutputFormat, " + UNION_NOTIN_SUBQ(10053, "Top level UNION is not supported currently; " + + "use a subquery for the UNION"), + INVALID_INPUT_FORMAT_TYPE(10054, "Input format must implement InputFormat"), + INVALID_OUTPUT_FORMAT_TYPE(10055, "Output Format must implement HiveOutputFormat, " + "otherwise it should be either IgnoreKeyTextOutputFormat or SequenceFileOutputFormat"), - NO_VALID_PARTN("The query does not reference any valid partition. " + NO_VALID_PARTN(10056, "The query does not reference any valid partition. " + "To run this query, set hive.mapred.mode=nonstrict"), - NO_OUTER_MAPJOIN("MAPJOIN cannot be performed with OUTER JOIN"), - INVALID_MAPJOIN_HINT("Neither table specified as map-table"), - INVALID_MAPJOIN_TABLE("Result of a union cannot be a map table"), - NON_BUCKETED_TABLE("Sampling expression needed for non-bucketed table"), - BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR("Numberator should not be bigger than " + NO_OUTER_MAPJOIN(10057, "MAPJOIN cannot be performed with OUTER JOIN"), + INVALID_MAPJOIN_HINT(10058, "Neither table specified as map-table"), + INVALID_MAPJOIN_TABLE(10059, "Result of a union cannot be a map table"), + NON_BUCKETED_TABLE(10060, "Sampling expression needed for non-bucketed table"), + BUCKETED_NUMBERATOR_BIGGER_DENOMINATOR(10061, "Numberator should not be bigger than " + "denaminator in sample clause for table"), - NEED_PARTITION_ERROR("Need to specify partition columns because the destination " + NEED_PARTITION_ERROR(10062, "Need to specify partition columns because the destination " + "table is partitioned"), - CTAS_CTLT_COEXISTENCE("Create table command does not allow LIKE and AS-SELECT in " + CTAS_CTLT_COEXISTENCE(10063, "Create table command does not allow LIKE and AS-SELECT in " + "the same command"), - LINES_TERMINATED_BY_NON_NEWLINE("LINES TERMINATED BY only supports newline '\\n' right now"), - CTAS_COLLST_COEXISTENCE("CREATE TABLE AS SELECT command cannot specify the list of columns " + LINES_TERMINATED_BY_NON_NEWLINE(10064, "LINES TERMINATED BY only supports " + + "newline '\\n' right now"), + CTAS_COLLST_COEXISTENCE(10065, "CREATE TABLE AS SELECT command cannot specify " + + "the list of columns " + "for the target table"), - CTLT_COLLST_COEXISTENCE("CREATE TABLE LIKE command cannot specify the list of columns for " + CTLT_COLLST_COEXISTENCE(10066, "CREATE TABLE LIKE command cannot specify the list of columns for " + "the target table"), - INVALID_SELECT_SCHEMA("Cannot derive schema from the select-clause"), - CTAS_PARCOL_COEXISTENCE("CREATE-TABLE-AS-SELECT does not support partitioning in the target " - + "table"), - CTAS_MULTI_LOADFILE("CREATE-TABLE-AS-SELECT results in multiple file load"), - CTAS_EXTTBL_COEXISTENCE("CREATE-TABLE-AS-SELECT cannot create external table"), - INSERT_EXTERNAL_TABLE("Inserting into a external table is not allowed"), - DATABASE_NOT_EXISTS("Database does not exist:"), - TABLE_ALREADY_EXISTS("Table already exists:", "42S02"), - COLUMN_ALIAS_ALREADY_EXISTS("Column alias already exists:", "42S02"), - UDTF_MULTIPLE_EXPR("Only a single expression in the SELECT clause is supported with UDTF's"), - UDTF_REQUIRE_AS("UDTF's require an AS clause"), - UDTF_NO_GROUP_BY("GROUP BY is not supported with a UDTF in 
the SELECT clause"), - UDTF_NO_SORT_BY("SORT BY is not supported with a UDTF in the SELECT clause"), - UDTF_NO_CLUSTER_BY("CLUSTER BY is not supported with a UDTF in the SELECT clause"), - UDTF_NO_DISTRIBUTE_BY("DISTRUBTE BY is not supported with a UDTF in the SELECT clause"), - UDTF_INVALID_LOCATION("UDTF's are not supported outside the SELECT clause, nor nested " + INVALID_SELECT_SCHEMA(10067, "Cannot derive schema from the select-clause"), + CTAS_PARCOL_COEXISTENCE(10068, "CREATE-TABLE-AS-SELECT does not support " + + "partitioning in the target table "), + CTAS_MULTI_LOADFILE(10069, "CREATE-TABLE-AS-SELECT results in multiple file load"), + CTAS_EXTTBL_COEXISTENCE(10070, "CREATE-TABLE-AS-SELECT cannot create external table"), + INSERT_EXTERNAL_TABLE(10071, "Inserting into a external table is not allowed"), + DATABASE_NOT_EXISTS(10072, "Database does not exist:"), + TABLE_ALREADY_EXISTS(10073, "Table already exists:", "42S02"), + COLUMN_ALIAS_ALREADY_EXISTS(10074, "Column alias already exists:", "42S02"), + UDTF_MULTIPLE_EXPR(10075, "Only a single expression in the SELECT clause is " + + "supported with UDTF's"), + UDTF_REQUIRE_AS(10076, "UDTF's require an AS clause"), + UDTF_NO_GROUP_BY(10077, "GROUP BY is not supported with a UDTF in the SELECT clause"), + UDTF_NO_SORT_BY(10078, "SORT BY is not supported with a UDTF in the SELECT clause"), + UDTF_NO_CLUSTER_BY(10079, "CLUSTER BY is not supported with a UDTF in the SELECT clause"), + UDTF_NO_DISTRIBUTE_BY(10080, "DISTRUBTE BY is not supported with a UDTF in the SELECT clause"), + UDTF_INVALID_LOCATION(10081, "UDTF's are not supported outside the SELECT clause, nor nested " + "in expressions"), - UDAF_INVALID_LOCATION("Not yet supported place for UDAF"), - UDTF_LATERAL_VIEW("UDTF's cannot be in a select expression when there is a lateral view"), - UDTF_ALIAS_MISMATCH("The number of aliases supplied in the AS clause does not match the " + UDTF_LATERAL_VIEW(10082, "UDTF's cannot be in a select expression when there is a lateral view"), + UDTF_ALIAS_MISMATCH(10083, "The number of aliases supplied in the AS clause does not match the " + "number of columns output by the UDTF"), - UDF_STATEFUL_INVALID_LOCATION("Stateful UDF's can only be invoked in the SELECT list"), - LATERAL_VIEW_WITH_JOIN("JOIN with a LATERAL VIEW is not supported"), - LATERAL_VIEW_INVALID_CHILD("LATERAL VIEW AST with invalid child"), - OUTPUT_SPECIFIED_MULTIPLE_TIMES("The same output cannot be present multiple times: "), - INVALID_AS("AS clause has an invalid number of aliases"), - VIEW_COL_MISMATCH("The number of columns produced by the SELECT clause does not match the " + UDF_STATEFUL_INVALID_LOCATION(10084, "Stateful UDF's can only be invoked in the SELECT list"), + LATERAL_VIEW_WITH_JOIN(10085, "JOIN with a LATERAL VIEW is not supported"), + LATERAL_VIEW_INVALID_CHILD(10086, "LATERAL VIEW AST with invalid child"), + OUTPUT_SPECIFIED_MULTIPLE_TIMES(10087, "The same output cannot be present multiple times: "), + INVALID_AS(10088, "AS clause has an invalid number of aliases"), + VIEW_COL_MISMATCH(10089, "The number of columns produced by the SELECT clause does not match the " + "number of column names specified by CREATE VIEW"), - DML_AGAINST_VIEW("A view cannot be used as target table for LOAD or INSERT"), - ANALYZE_VIEW("ANALYZE is not supported for views"), - VIEW_PARTITION_TOTAL("At least one non-partitioning column must be present in view"), - VIEW_PARTITION_MISMATCH("Rightmost columns in view output do not match PARTITIONED ON clause"), - 
PARTITION_DYN_STA_ORDER("Dynamic partition cannot be the parent of a static partition"), - DYNAMIC_PARTITION_DISABLED("Dynamic partition is disabled. Either enable it by setting " + DML_AGAINST_VIEW(10090, "A view cannot be used as target table for LOAD or INSERT"), + ANALYZE_VIEW(10091, "ANALYZE is not supported for views"), + VIEW_PARTITION_TOTAL(10092, "At least one non-partitioning column must be present in view"), + VIEW_PARTITION_MISMATCH(10093, "Rightmost columns in view output do not match " + + "PARTITIONED ON clause"), + PARTITION_DYN_STA_ORDER(10094, "Dynamic partition cannot be the parent of a static partition"), + DYNAMIC_PARTITION_DISABLED(10095, "Dynamic partition is disabled. Either enable it by setting " + "hive.exec.dynamic.partition=true or specify partition column values"), - DYNAMIC_PARTITION_STRICT_MODE("Dynamic partition strict mode requires at least one " + DYNAMIC_PARTITION_STRICT_MODE(10096, "Dynamic partition strict mode requires at least one " + "static partition column. To turn this off set hive.exec.dynamic.partition.mode=nonstrict"), - DYNAMIC_PARTITION_MERGE("Dynamic partition does not support merging using non-CombineHiveInputFormat" - + "Please check your hive.input.format setting and make sure your Hadoop version support " - + "CombineFileInputFormat"), - NONEXISTPARTCOL("Non-Partition column appears in the partition specification: "), - UNSUPPORTED_TYPE("DATE and DATETIME types aren't supported yet. Please use " + DYNAMIC_PARTITION_MERGE(10097, "Dynamic partition does not support merging using " + + "non-CombineHiveInputFormat. Please check your hive.input.format setting and " + + "make sure your Hadoop version support CombineFileInputFormat"), + NONEXISTPARTCOL(10098, "Non-Partition column appears in the partition specification: "), + UNSUPPORTED_TYPE(10099, "DATE and DATETIME types aren't supported yet. Please use " + "TIMESTAMP instead"), - CREATE_NON_NATIVE_AS("CREATE TABLE AS SELECT cannot be used for a non-native table"), - LOAD_INTO_NON_NATIVE("A non-native table cannot be used as target for LOAD"), - LOCKMGR_NOT_SPECIFIED("Lock manager not specified correctly, set hive.lock.manager"), - LOCKMGR_NOT_INITIALIZED("Lock manager could not be initialized, check hive.lock.manager "), - LOCK_CANNOT_BE_ACQUIRED("Locks on the underlying objects cannot be acquired. retry after some time"), - ZOOKEEPER_CLIENT_COULD_NOT_BE_INITIALIZED("Check hive.zookeeper.quorum and hive.zookeeper.client.port"), - OVERWRITE_ARCHIVED_PART("Cannot overwrite an archived partition. " + + CREATE_NON_NATIVE_AS(10100, "CREATE TABLE AS SELECT cannot be used for a non-native table"), + LOAD_INTO_NON_NATIVE(10101, "A non-native table cannot be used as target for LOAD"), + LOCKMGR_NOT_SPECIFIED(10102, "Lock manager not specified correctly, set hive.lock.manager"), + LOCKMGR_NOT_INITIALIZED(10103, "Lock manager could not be initialized, check hive.lock.manager "), + LOCK_CANNOT_BE_ACQUIRED(10104, "Locks on the underlying objects cannot be acquired. " + + "retry after some time"), + ZOOKEEPER_CLIENT_COULD_NOT_BE_INITIALIZED(10105, "Check hive.zookeeper.quorum " + + "and hive.zookeeper.client.port"), + OVERWRITE_ARCHIVED_PART(10106, "Cannot overwrite an archived partition. " + "Unarchive before running this command"), - ARCHIVE_METHODS_DISABLED("Archiving methods are currently disabled. " + + ARCHIVE_METHODS_DISABLED(10107, "Archiving methods are currently disabled. 
" + "Please see the Hive wiki for more information about enabling archiving"), - ARCHIVE_ON_MULI_PARTS("ARCHIVE can only be run on a single partition"), - UNARCHIVE_ON_MULI_PARTS("ARCHIVE can only be run on a single partition"), - ARCHIVE_ON_TABLE("ARCHIVE can only be run on partitions"), - RESERVED_PART_VAL("Partition value contains a reserved substring"), - HOLD_DDLTIME_ON_NONEXIST_PARTITIONS("HOLD_DDLTIME hint cannot be applied to dynamic " + + ARCHIVE_ON_MULI_PARTS(10108, "ARCHIVE can only be run on a single partition"), + UNARCHIVE_ON_MULI_PARTS(10109, "ARCHIVE can only be run on a single partition"), + ARCHIVE_ON_TABLE(10110, "ARCHIVE can only be run on partitions"), + RESERVED_PART_VAL(10111, "Partition value contains a reserved substring"), + HOLD_DDLTIME_ON_NONEXIST_PARTITIONS(10112, "HOLD_DDLTIME hint cannot be applied to dynamic " + "partitions or non-existent partitions"), - OFFLINE_TABLE_OR_PARTITION("Query against an offline table or partition"), - OUTERJOIN_USES_FILTERS("The query results could be wrong. " + + OFFLINE_TABLE_OR_PARTITION(10113, "Query against an offline table or partition"), + OUTERJOIN_USES_FILTERS(10114, "The query results could be wrong. " + "Turn on hive.outerjoin.supports.filters"), - NEED_PARTITION_SPECIFICATION("Table is partitioned and partition specification is needed"), - INVALID_METADATA("The metadata file could not be parsed "), - NEED_TABLE_SPECIFICATION("Table name could be determined; It should be specified "), - PARTITION_EXISTS("Partition already exists"), - TABLE_DATA_EXISTS("Table exists and contains data files"), - INCOMPATIBLE_SCHEMA("The existing table is not compatible with the import spec. "), - EXIM_FOR_NON_NATIVE("Export/Import cannot be done for a non-native table. "), - INSERT_INTO_BUCKETIZED_TABLE("Bucketized tables do not support INSERT INTO:"), - NO_COMPARE_BIGINT_STRING("In strict mode, comparing bigints and strings is not allowed, " + NEED_PARTITION_SPECIFICATION(10115, "Table is partitioned and partition specification is needed"), + INVALID_METADATA(10116, "The metadata file could not be parsed "), + NEED_TABLE_SPECIFICATION(10117, "Table name could be determined; It should be specified "), + PARTITION_EXISTS(10118, "Partition already exists"), + TABLE_DATA_EXISTS(10119, "Table exists and contains data files"), + INCOMPATIBLE_SCHEMA(10120, "The existing table is not compatible with the import spec. "), + EXIM_FOR_NON_NATIVE(10121, "Export/Import cannot be done for a non-native table. "), + INSERT_INTO_BUCKETIZED_TABLE(10122, "Bucketized tables do not support INSERT INTO:"), + NO_COMPARE_BIGINT_STRING(10123, "In strict mode, comparing bigints and strings is not allowed, " + "it may result in a loss of precision. " + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"), - NO_COMPARE_BIGINT_DOUBLE("In strict mode, comparing bigints and doubles is not allowed, " + NO_COMPARE_BIGINT_DOUBLE(10124, "In strict mode, comparing bigints and doubles is not allowed, " + "it may result in a loss of precision. " + "If you really want to perform the operation, set hive.mapred.mode=nonstrict"), - PARTSPEC_DIFFER_FROM_SCHEMA("Partition columns in partition specification are not the same as " - + "that defined in the table schema. 
The names and orders have to be exactly the same."), - PARTITION_COLUMN_NON_PRIMITIVE("Partition column must be of primitive type."), - INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS( + PARTSPEC_DIFFER_FROM_SCHEMA(10125, "Partition columns in partition specification are " + + "not the same as that defined in the table schema. " + + "The names and orders have to be exactly the same."), + PARTITION_COLUMN_NON_PRIMITIVE(10126, "Partition column must be of primitive type."), + INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS(10127, "Dynamic partitions do not support IF NOT EXISTS. Specified partitions with value :"), - ; + UDAF_INVALID_LOCATION(10128, "Not yet supported place for UDAF"), + SCRIPT_INIT_ERROR(20000, "Unable to initialize custom script."), + SCRIPT_IO_ERROR(20001, "An error occurred while reading or writing to your custom script. " + + "It may have crashed with an error."), + SCRIPT_GENERIC_ERROR(20002, "Hive encountered some unknown error while " + + "running your custom script."), + SCRIPT_CLOSING_ERROR(20003, "An error occurred when trying to close the Operator " + + "running your custom script.") + ; + + private int errorCode; private String mesg; private String sqlState; private static final char SPACE = ' '; - private static final Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*line [0-9]+:[0-9]+ (.*)"); + private static final Pattern ERROR_MESSAGE_PATTERN = Pattern.compile(".*Line [0-9]+:[0-9]+ (.*)"); + private static final Pattern ERROR_CODE_PATTERN = + Pattern.compile("HiveException:\\s+\\[Error ([0-9]+)\\]: (.*)"); private static Map mesgToErrorMsgMap = new HashMap(); private static int minMesgLength = -1; @@ -216,31 +246,19 @@ } /** - * For a given error message string, searches for a ErrorMsg enum - * that appears to be a match. If an match is found, returns the - * SQLState associated with the ErrorMsg. If a match - * is not found or ErrorMsg has no SQLState, returns - * the SQLState bound to the GENERIC_ERROR - * ErrorMsg. - * - * @param mesg - * An error message string - * @return SQLState + * Given an error message string, returns the ErrorMsg object associated with it. + * @param mesg An error message string + * @return ErrorMsg */ - public static String findSQLState(String mesg) { - + public static ErrorMsg getErrorMsg(String mesg) { if (mesg == null) { - return GENERIC_ERROR.getSQLState(); + return GENERIC_ERROR; } // first see if there is a direct match ErrorMsg errorMsg = mesgToErrorMsgMap.get(mesg); if (errorMsg != null) { - if (errorMsg.getSQLState() != null) { - return errorMsg.getSQLState(); - } else { - return GENERIC_ERROR.getSQLState(); - } + return errorMsg; } // if not see if the mesg follows type of format, which is typically the @@ -257,11 +275,7 @@ while (truncatedMesg.length() > minMesgLength) { errorMsg = mesgToErrorMsgMap.get(truncatedMesg.trim()); if (errorMsg != null) { - if (errorMsg.getSQLState() != null) { - return errorMsg.getSQLState(); - } else { - return GENERIC_ERROR.getSQLState(); - } + return errorMsg; } int lastSpace = truncatedMesg.lastIndexOf(SPACE); @@ -273,15 +287,47 @@ truncatedMesg = truncatedMesg.substring(0, lastSpace).trim(); } - return GENERIC_ERROR.getSQLState(); + return GENERIC_ERROR; } - ErrorMsg(String mesg) { + /** + * Given an error code, returns the ErrorMsg object associated with it. 
@@ -365,6 +411,14 @@
     return mesg + " " + reason;
   }
 
+  public String getErrorCodedMsg() {
+    return "[Error " + errorCode + "]: " + mesg;
+  }
+
+  public static Pattern getErrorCodePattern() {
+    return ERROR_CODE_PATTERN;
+  }
+
   public String getMsg() {
     return mesg;
   }
@@ -372,4 +426,8 @@
   public String getSQLState() {
     return sqlState;
   }
+
+  public int getErrorCode() {
+    return errorCode;
+  }
 }
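Taken together, the new members give both directions of the mapping: getErrorMsg(int) goes from a return code back to the enum entry, and getErrorCodedMsg() goes from an entry to a display string. A short usage sketch, assuming the patch is applied and that ErrorMsg lives in the org.apache.hadoop.hive.ql package (the Driver hunk below drops the old ql.parse import, which suggests that move; the class name here is hypothetical):

    import org.apache.hadoop.hive.ql.ErrorMsg; // assumed post-patch package

    public class ErrorMsgLookupDemo {
      public static void main(String[] args) {
        // Reverse lookup by numeric code; getErrorMsg(int) returns null for unknown codes.
        ErrorMsg em = ErrorMsg.getErrorMsg(10118);
        if (em != null) {
          System.out.println(em.getErrorCodedMsg()); // [Error 10118]: Partition already exists
          System.out.println(em.getSQLState());      // 42000, the two-arg constructor default
        }
      }
    }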
Index: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(revision 1336457)
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java	(working copy)
@@ -82,7 +82,6 @@
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
 import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContextImpl;
 import org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer;
@@ -496,24 +495,17 @@
       //restore state after we're done executing a specific query
 
       return 0;
-    } catch (SemanticException e) {
-      errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
+    } catch (Exception e) {
+      ErrorMsg error = ErrorMsg.getErrorMsg(e.getMessage());
+      errorMessage = "FAILED: " + e.getClass().getSimpleName();
+      if (error != ErrorMsg.GENERIC_ERROR) {
+        errorMessage += " [Error " + error.getErrorCode() + "]:";
+      }
+      errorMessage += " " + e.getMessage();
+      SQLState = error.getSQLState();
       console.printError(errorMessage, "\n"
           + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return (10);
-    } catch (ParseException e) {
-      errorMessage = "FAILED: Parse Error: " + e.getMessage();
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
-      console.printError(errorMessage, "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return (11);
-    } catch (Exception e) {
-      errorMessage = "FAILED: Hive Internal Error: " + Utilities.getNameMessage(e);
-      SQLState = ErrorMsg.findSQLState(e.getMessage());
-      console.printError(errorMessage + "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return (12);
+      return error.getErrorCode();
     } finally {
       perfLogger.PerfLogEnd(LOG, PerfLogger.COMPILE);
       restoreSession(queryState);
@@ -1137,8 +1129,11 @@
           if (backupTask != null) {
             errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
                 + tsk.getClass().getName();
+            ErrorMsg em = ErrorMsg.getErrorMsg(exitVal);
+            if (em != null) {
+              errorMessage += ". " + em.getMsg();
+            }
             console.printError(errorMessage);
-
             errorMessage = "ATTEMPT: Execute BackupTask: " + backupTask.getClass().getName();
             console.printError(errorMessage);
 
@@ -1159,9 +1154,12 @@
             perfLogger.PerfLogEnd(LOG, PerfLogger.FAILURE_HOOK + ofh.getClass().getName());
           }
 
-          // TODO: This error messaging is not very informative. Fix that.
           errorMessage = "FAILED: Execution Error, return code " + exitVal + " from "
              + tsk.getClass().getName();
+          ErrorMsg em = ErrorMsg.getErrorMsg(exitVal);
+          if (em != null) {
+            errorMessage += ". " + em.getMsg();
+          }
           SQLState = "08S01";
           console.printError(errorMessage);
           if (running.size() != 0) {
@@ -1170,7 +1168,7 @@
           // in case we decided to run everything in local mode, restore the
           // the jobtracker setting to its initial value
           ctx.restoreOriginalTracker();
-          return 9;
+          return exitVal;
         }
       }
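With compile() now returning the matched ErrorMsg code and the task-failure path returning exitVal instead of the old constant 9, a caller can translate any non-zero return code into a specific message via the getErrorMsg(int) lookup added above. A hedged sketch of such a decoder (class and method names here are illustrative, not part of the patch):

    import org.apache.hadoop.hive.ql.ErrorMsg; // assumed post-patch package

    public final class ReturnCodeDecoder {
      private ReturnCodeDecoder() {
      }

      /** Renders a Driver/Task return code as a readable message. */
      public static String describe(int returnCode) {
        if (returnCode == 0) {
          return "OK";
        }
        ErrorMsg em = ErrorMsg.getErrorMsg(returnCode); // null if unmapped
        return em != null
            ? "[Error " + em.getErrorCode() + "]: " + em.getMsg()
            : "Unrecognized return code: " + returnCode;
      }

      public static void main(String[] args) {
        System.out.println(describe(20001)); // the SCRIPT_IO_ERROR message
        System.out.println(describe(0));     // OK
      }
    }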