diff --git itests/util/src/main/java/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java itests/util/src/main/java/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java
index 351ef00..322834e 100644
--- itests/util/src/main/java/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java
+++ itests/util/src/main/java/org/apache/hadoop/hive/ql/security/InjectableDummyAuthenticator.java
@@ -82,12 +82,9 @@ public Configuration getConf() {
   public void setConf(Configuration config) {
     try {
       hmap = hmapClass.newInstance();
-    } catch (InstantiationException e) {
+    } catch (Exception e) {
       throw new RuntimeException("Whoops, could not create an Authenticator of class " +
-          hmapClass.getName());
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException("Whoops, could not create an Authenticator of class " +
-          hmapClass.getName());
+          hmapClass.getName(), e);
     }
 
     hmap.setConf(config);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 2227e6f..3a2a6ee 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -242,7 +242,7 @@ public int execute(DriverContext driverContext) {
         job.setPartitionerClass((Class) (Class.forName(HiveConf.getVar(job,
             HiveConf.ConfVars.HIVEPARTITIONER))));
       } catch (ClassNotFoundException e) {
-        throw new RuntimeException(e.getMessage());
+        throw new RuntimeException(e.getMessage(), e);
       }
 
       if (mWork.getNumMapTasks() != null) {
@@ -288,7 +288,7 @@ public int execute(DriverContext driverContext) {
     try {
       job.setInputFormat((Class) (Class.forName(inpFormat)));
     } catch (ClassNotFoundException e) {
-      throw new RuntimeException(e.getMessage());
+      throw new RuntimeException(e.getMessage(), e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
index 297ce44..f7612d6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/CustomPartitionVertex.java
@@ -234,7 +234,7 @@ public void onRootVertexInitialized(String inputName, InputDescriptor inputDescr
       try {
         fileSplit = getFileSplitFromEvent(diEvent);
       } catch (IOException e) {
-        throw new RuntimeException("Failed to get file split for event: " + diEvent);
+        throw new RuntimeException("Failed to get file split for event: " + diEvent, e);
       }
       Set fsList = pathFileSplitsMap.get(Utilities.getBucketFileNameFromPathSubString(fileSplit.getPath()
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index d06bdb9..c9029f2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -578,7 +578,7 @@ private Vertex createVertex(JobConf conf, MapWork mapWork,
       }
     } catch (IOException e) {
       throw new RuntimeException(
-          "Can't make path " + outputPath + " : " + e.getMessage());
+          "Can't make path " + outputPath + " : " + e.getMessage(), e);
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
index 3378842..5895ef5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
@@ -51,7 +51,7 @@ protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) {
     } catch (Exception e) {
       // This should never happen. If it does, there is a bug.
-      throw new RuntimeException("Internal error: unable to convert decimal to string");
+      throw new RuntimeException("Internal error: unable to convert decimal to string", e);
     }
     assign(outV, i, b, b.length);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
index e3c5b7f..8246e9f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFArgDesc.java
@@ -107,7 +107,7 @@ public DeferredObject getDeferredJavaObject(int row, VectorizedRowBatch b, int a
       o = writers[argPosition].writeValue(cv, row);
       return new GenericUDF.DeferredJavaObject(o);
     } catch (HiveException e) {
-      throw new RuntimeException("Unable to get Java object from VectorizedRowBatch");
+      throw new RuntimeException("Unable to get Java object from VectorizedRowBatch", e);
     }
   }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
index c52624c..c62add0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/index/HiveIndexedInputFormat.java
@@ -185,7 +185,7 @@ public HiveIndexedInputFormat(String indexFileName) {
         }
       } catch (HiveException e) {
         throw new RuntimeException(
-            "Unable to get metadata for input table split" + split.getPath());
+            "Unable to get metadata for input table split" + split.getPath(), e);
       }
     }
     InputSplit retA[] = newSplits.toArray((new FileSplit[newSplits.size()]));
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index 8f58c65..41b5f1c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -66,8 +66,8 @@ public void write(final ArrayWritable record) {
       writeGroupFields(record, schema);
     } catch (RuntimeException e) {
       String errorMessage = "Parquet record is malformed: " + e.getMessage();
-      LOG.error(errorMessage);
-      throw new RuntimeException(errorMessage);
+      LOG.error(errorMessage, e);
+      throw new RuntimeException(errorMessage, e);
     }
     recordConsumer.endMessage();
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 57a9bcc..b1d5ea0 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -148,7 +148,7 @@ public int execute(DriverContext driverContext) {
       job.setInputFormat((Class) (Class
          .forName(inpFormat)));
     } catch (ClassNotFoundException e) {
-      throw new RuntimeException(e.getMessage());
+      throw new RuntimeException(e.getMessage(), e);
     }
 
     job.setOutputKeyClass(NullWritable.class);
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index 65785fe..7d947f1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -124,7 +124,7 @@ public int execute(DriverContext driverContext) {
      job.setInputFormat((Class) (Class
         .forName(inpFormat)));
     } catch (ClassNotFoundException e) {
-      throw new RuntimeException(e.getMessage());
+      throw new RuntimeException(e.getMessage(), e);
     }
 
     Path outputPath = this.work.getOutputDir();
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
index 6d57a8d..1a50a46 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
@@ -273,8 +273,8 @@ private void updateColStats(Set projIndxLst) {
       }
     } catch (HiveException e) {
       String logMsg = "Collecting stats failed.";
-      LOG.error(logMsg);
-      throw new RuntimeException(logMsg);
+      LOG.error(logMsg, e);
+      throw new RuntimeException(logMsg, e);
     }
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/xml/UDFXPathUtil.java ql/src/java/org/apache/hadoop/hive/ql/udf/xml/UDFXPathUtil.java
index a5a8d64..7fc0ae5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/xml/UDFXPathUtil.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/xml/UDFXPathUtil.java
@@ -71,7 +71,7 @@ public Object eval(String xml, String path, QName qname) {
     try {
       return expression.evaluate(inputSource, qname);
     } catch (XPathExpressionException e) {
-      throw new RuntimeException ("Invalid expression '" + oldPath + "'");
+      throw new RuntimeException ("Invalid expression '" + oldPath + "'", e);
     }
   }
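
Every hunk above makes the same fix: the caught exception is passed as the second argument to RuntimeException(String, Throwable) (and, where present, to LOG.error), so the root cause is chained into the rethrown exception instead of being discarded. A minimal standalone sketch of the before/after behavior follows; the class and method names are illustrative, not taken from the patch.

// CauseChainingDemo.java -- hypothetical demo of the pattern applied by this patch.
public class CauseChainingDemo {

  static Object instantiate(Class<?> clazz) {
    try {
      return clazz.newInstance();
    } catch (Exception e) {
      // Before the patch: throw new RuntimeException(msg) dropped the cause,
      // so the root failure (here an InstantiationException) never reached the logs.
      // After the patch: the cause is chained, and printStackTrace() emits a
      // "Caused by:" section with the original stack trace.
      throw new RuntimeException("Whoops, could not create an instance of "
          + clazz.getName(), e);
    }
  }

  public static void main(String[] args) {
    try {
      instantiate(Integer.class); // Integer has no public no-arg constructor
    } catch (RuntimeException e) {
      e.printStackTrace(); // includes "Caused by: java.lang.InstantiationException: ..."
    }
  }
}

Note on the InjectableDummyAuthenticator hunk: collapsing the two original catch blocks into a single catch (Exception e) is behavior-preserving there because InstantiationException and IllegalAccessException were handled identically; on Java 7+ a multi-catch (InstantiationException | IllegalAccessException) would achieve the same with a narrower net.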