Index: data/scripts/input20_script =================================================================== --- data/scripts/input20_script (revision 0) +++ data/scripts/input20_script (revision 0) @@ -0,0 +1,3 @@ +#! /bin/bash + +uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@" Property changes on: data/scripts/input20_script ___________________________________________________________________ Added: svn:executable + * Index: data/scripts/error_script =================================================================== --- data/scripts/error_script (revision 0) +++ data/scripts/error_script (revision 0) @@ -0,0 +1,11 @@ +#! /bin/bash + +exit 1 +ret=0 +while [ "$ret" = "0" ]; +do + read -t 1 -a v + ret=$? +done + +exit 1 Property changes on: data/scripts/error_script ___________________________________________________________________ Added: svn:executable + * Index: ql/src/test/results/clientnegative/script_error.q.out =================================================================== --- ql/src/test/results/clientnegative/script_error.q.out (revision 0) +++ ql/src/test/results/clientnegative/script_error.q.out (revision 0) @@ -0,0 +1,36 @@ +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF src key) (TOK_COLREF src value)) '../data/scripts/error_script' (TOK_ALIASLIST tkey tvalue)))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + Transform Operator + command: ../data/scripts/error_script + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask Index: ql/src/test/results/clientpositive/input20.q.out =================================================================== --- ql/src/test/results/clientpositive/input20.q.out (revision 753210) +++ ql/src/test/results/clientpositive/input20.q.out (working copy) @@ -1,5 +1,5 @@ ABSTRACT SYNTAX TREE: - (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (% (TOK_COLREF src key) 2) (% (TOK_COLREF src key) 5)) 'cat'))) (TOK_CLUSTERBY (TOK_COLREF key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF tmap key) (TOK_COLREF tmap value)) 'uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@"' (TOK_ALIASLIST key value)))))) + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (% (TOK_COLREF src key) 2) (% (TOK_COLREF src key) 5)) 'cat'))) (TOK_CLUSTERBY (TOK_COLREF key)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_COLREF tmap key) (TOK_COLREF tmap value)) '../data/scripts/input20_script' (TOK_ALIASLIST key value)))))) STAGE DEPENDENCIES: Stage-1 is a root stage @@ -48,7 +48,7 @@ expr: 1 type: string Transform Operator - command: uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@" + command: ../data/scripts/input20_script output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat @@ -72,3 +72,396 @@ name: dest1 +1 0.0_1.0 +1 0.0_3.0 +1 0.0_4.0 +1 0.0_3.0 
+1 0.0_1.0 +1 0.0_2.0 +2 0.0_3.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_0.0 +1 0.0_4.0 +3 0.0_0.0 +1 0.0_4.0 +2 0.0_1.0 +1 0.0_0.0 +2 0.0_2.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_1.0 +1 0.0_4.0 +1 0.0_0.0 +3 0.0_4.0 +1 0.0_1.0 +2 0.0_3.0 +1 0.0_0.0 +3 0.0_3.0 +1 0.0_2.0 +1 0.0_0.0 +2 0.0_3.0 +1 0.0_1.0 +1 0.0_3.0 +1 0.0_4.0 +2 0.0_3.0 +1 0.0_0.0 +1 0.0_3.0 +2 0.0_4.0 +2 0.0_0.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_0.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_0.0 +1 0.0_4.0 +1 0.0_0.0 +1 0.0_3.0 +1 0.0_2.0 +1 0.0_3.0 +1 0.0_1.0 +2 0.0_3.0 +2 0.0_2.0 +1 0.0_4.0 +1 0.0_0.0 +1 0.0_4.0 +1 0.0_0.0 +1 0.0_3.0 +1 0.0_1.0 +1 0.0_3.0 +1 0.0_4.0 +2 0.0_0.0 +2 0.0_3.0 +1 0.0_4.0 +2 0.0_2.0 +1 0.0_1.0 +2 0.0_3.0 +2 0.0_0.0 +1 0.0_4.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_0.0 +2 0.0_1.0 +1 0.0_4.0 +2 0.0_3.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_2.0 +2 0.0_1.0 +1 0.0_3.0 +1 0.0_0.0 +2 0.0_3.0 +1 0.0_0.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_3.0 +1 0.0_4.0 +1 0.0_1.0 +1 0.0_4.0 +1 0.0_2.0 +2 0.0_0.0 +1 0.0_2.0 +1 0.0_0.0 +1 0.0_2.0 +2 0.0_3.0 +1 0.0_4.0 +1 0.0_0.0 +1 0.0_1.0 +1 0.0_3.0 +2 0.0_2.0 +2 0.0_4.0 +1 0.0_3.0 +1 0.0_1.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_3.0 +1 0.0_1.0 +1 0.0_2.0 +1 0.0_1.0 +2 0.0_2.0 +1 0.0_1.0 +1 0.0_0.0 +1 0.0_1.0 +1 0.0_2.0 +1 0.0_3.0 +2 0.0_1.0 +1 0.0_4.0 +3 0.0_3.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_1.0 +1 0.0_4.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_1.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_0.0 +1 0.0_4.0 +1 0.0_0.0 +1 0.0_1.0 +2 0.0_4.0 +1 0.0_2.0 +1 0.0_3.0 +1 0.0_1.0 +2 0.0_2.0 +1 0.0_3.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_1.0 +2 0.0_4.0 +1 0.0_0.0 +2 0.0_1.0 +1 0.0_0.0 +1 0.0_4.0 +2 0.0_3.0 +2 0.0_0.0 +1 0.0_1.0 +1 0.0_4.0 +2 0.0_3.0 +2 0.0_2.0 +1 0.0_0.0 +2 0.0_1.0 +1 0.0_2.0 +1 0.0_4.0 +2 0.0_3.0 +1 0.0_2.0 +1 0.0_3.0 +1 0.0_2.0 +1 0.0_4.0 +1 0.0_1.0 +1 0.0_0.0 +1 0.0_4.0 +2 0.0_1.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_0.0 +1 0.0_3.0 +1 0.0_2.0 +1 0.0_1.0 +1 0.0_3.0 +2 0.0_0.0 +1 0.0_4.0 +1 0.0_3.0 +1 0.0_4.0 +1 0.0_0.0 +1 0.0_2.0 +1 0.0_3.0 +1 0.0_1.0 +1 
0.0_3.0 +1 0.0_0.0 +1 0.0_1.0 +1 0.0_0.0 +3 0.0_3.0 +1 0.0_1.0 +1 0.0_3.0 +1 0.0_0.0 +3 0.0_2.0 +1 0.0_0.0 +1 0.0_4.0 +1 0.0_2.0 +1 0.0_1.0 +2 1.0_4.0 +2 1.0_3.0 +2 1.0_2.0 +2 1.0_0.0 +1 1.0_2.0 +1 1.0_0.0 +1 1.0_2.0 +1 1.0_3.0 +1 1.0_1.0 +3 1.0_0.0 +3 1.0_4.0 +1 1.0_2.0 +1 1.0_1.0 +1 1.0_2.0 +1 1.0_0.0 +2 1.0_3.0 +3 1.0_4.0 +2 1.0_2.0 +1 1.0_0.0 +1 1.0_3.0 +1 1.0_0.0 +1 1.0_1.0 +1 1.0_2.0 +2 1.0_4.0 +3 1.0_3.0 +1 1.0_0.0 +1 1.0_2.0 +1 1.0_0.0 +1 1.0_3.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_1.0 +3 1.0_0.0 +1 1.0_4.0 +1 1.0_1.0 +1 1.0_4.0 +2 1.0_3.0 +1 1.0_1.0 +1 1.0_2.0 +1 1.0_1.0 +1 1.0_2.0 +1 1.0_3.0 +4 1.0_1.0 +1 1.0_2.0 +2 1.0_1.0 +1 1.0_0.0 +1 1.0_2.0 +2 1.0_3.0 +2 1.0_2.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_1.0 +4 1.0_3.0 +1 1.0_4.0 +1 1.0_2.0 +2 1.0_1.0 +1 1.0_2.0 +2 1.0_4.0 +1 1.0_0.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_2.0 +1 1.0_1.0 +1 1.0_0.0 +1 1.0_4.0 +1 1.0_3.0 +1 1.0_0.0 +1 1.0_3.0 +1 1.0_2.0 +2 1.0_0.0 +1 1.0_2.0 +1 1.0_0.0 +1 1.0_3.0 +1 1.0_2.0 +1 1.0_0.0 +1 1.0_1.0 +1 1.0_0.0 +1 1.0_1.0 +1 1.0_4.0 +1 1.0_3.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_1.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_0.0 +2 1.0_4.0 +2 1.0_2.0 +1 1.0_1.0 +1 1.0_3.0 +1 1.0_2.0 +1 1.0_1.0 +2 1.0_2.0 +2 1.0_0.0 +4 1.0_4.0 +1 1.0_3.0 +1 1.0_1.0 +1 1.0_3.0 +2 1.0_0.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_2.0 +1 1.0_1.0 +1 1.0_2.0 +2 1.0_4.0 +1 1.0_0.0 +1 1.0_4.0 +2 1.0_2.0 +1 1.0_4.0 +2 1.0_2.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_2.0 +1 1.0_3.0 +1 1.0_2.0 +1 1.0_4.0 +2 1.0_3.0 +1 1.0_2.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_3.0 +1 1.0_1.0 +1 1.0_2.0 +2 1.0_3.0 +1 1.0_4.0 +1 1.0_1.0 +3 1.0_0.0 +1 1.0_2.0 +2 1.0_0.0 +1 1.0_2.0 +2 1.0_1.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_4.0 +1 1.0_3.0 +1 1.0_2.0 +1 1.0_0.0 +1 1.0_4.0 +1 1.0_3.0 +1 1.0_2.0 +2 1.0_3.0 +1 1.0_0.0 +2 1.0_4.0 +2 1.0_2.0 +1 1.0_3.0 +1 1.0_1.0 +1 1.0_3.0 +2 1.0_4.0 +1 1.0_3.0 +2 1.0_4.0 +1 1.0_3.0 +1 1.0_1.0 +2 1.0_4.0 +1 1.0_3.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_0.0 +1 1.0_1.0 +1 1.0_4.0 +1 1.0_3.0 +1 1.0_2.0 +2 1.0_4.0 +1 1.0_0.0 +1 1.0_1.0 +1 1.0_3.0 
+1 1.0_1.0 +1 1.0_2.0 +2 1.0_3.0 +1 1.0_4.0 +1 1.0_2.0 +1 1.0_1.0 +1 1.0_3.0 +1 1.0_2.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_4.0 +1 1.0_0.0 +1 1.0_4.0 +2 1.0_2.0 +1 1.0_0.0 +1 1.0_3.0 +1 1.0_4.0 +1 1.0_1.0 +1 1.0_2.0 +1 1.0_4.0 +3 1.0_2.0 +1 1.0_1.0 +1 1.0_0.0 +1 1.0_4.0 +1 1.0_3.0 +1 1.0_1.0 Index: ql/src/test/queries/clientnegative/script_error.q =================================================================== --- ql/src/test/queries/clientnegative/script_error.q (revision 0) +++ ql/src/test/queries/clientnegative/script_error.q (revision 0) @@ -0,0 +1,7 @@ +EXPLAIN +SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue) +FROM src; + +SELECT TRANSFORM(src.key, src.value) USING '../data/scripts/error_script' AS (tkey, tvalue) +FROM src; + Index: ql/src/test/queries/clientpositive/input20.q =================================================================== --- ql/src/test/queries/clientpositive/input20.q (revision 753210) +++ ql/src/test/queries/clientpositive/input20.q (working copy) @@ -9,7 +9,7 @@ ) tmap INSERT OVERWRITE TABLE dest1 REDUCE tmap.key, tmap.value -USING 'uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@"' +USING '../data/scripts/input20_script' AS key, value; FROM ( @@ -20,5 +20,7 @@ ) tmap INSERT OVERWRITE TABLE dest1 REDUCE tmap.key, tmap.value -USING 'uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@"' +USING '../data/scripts/input20_script' AS key, value; + +SELECT * FROM dest1; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 753210) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy) @@ -262,17 +262,18 @@ } public void close(boolean abort) throws HiveException { - try { - logStats(); - if(childOperators == null) - return; + try { + logStats(); + if(childOperators == null) + return; - for(Operator op: childOperators) { - 
op.close(abort); + for(Operator op: childOperators) { + op.close(abort); + } + } catch (HiveException e) { + e.printStackTrace(); // TODO(review): prefer the class logger (LOG.error) over printStackTrace so the error reaches task logs + throw e; + } - - } catch (HiveException e) { - } } /** Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (revision 753210) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (working copy) @@ -75,7 +75,8 @@ outWriter.close(abort); commit(); } catch (IOException e) { - throw new HiveException("Error in committing output in file: "+ outPath.toString()); + // Don't throw an exception, just ignore and return. NOTE(review): this silently swallows IOExceptions from close()/commit() -- confirm a commit failure is surfaced to the caller some other way, otherwise data loss would go unreported + return; } } } else {