diff --git metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
index fde1b54..2cdebc8 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
@@ -860,7 +860,8 @@ public void markFailed(CompactionInfo ci) throws MetaException {//todo: this sho
         //compactions are not happening.
         ci.state = ATTEMPTED_STATE;
         //this is not strictly accurate, but 'type' cannot be null.
-        ci.type = CompactionType.MINOR;
+        if(ci.type == null) { ci.type = CompactionType.MINOR; }
+        ci.start = getDbTime(dbConn);
       }
       else {
         ci.state = FAILED_STATE;
@@ -874,7 +875,7 @@ public void markFailed(CompactionInfo ci) throws MetaException {//todo: this sho
       closeStmt(pStmt);
       dbConn.commit();
     } catch (SQLException e) {
-      LOG.error("Unable to delete from compaction queue " + e.getMessage());
+      LOG.warn("markFailed(" + ci.id + "):" + e.getMessage());
       LOG.debug("Going to rollback");
       rollbackDBConn(dbConn);
       try {
@@ -883,7 +884,7 @@ public void markFailed(CompactionInfo ci) throws MetaException {//todo: this sho
       catch(MetaException ex) {
         LOG.error("Unable to connect to transaction database " + StringUtils.stringifyException(ex));
       }
-      LOG.error("Unable to connect to transaction database " + StringUtils.stringifyException(e));
+      LOG.error("markFailed(" + ci + ") failed: " + e.getMessage(), e);
     } finally {
       close(rs, stmt, null);
       close(null, pStmt, dbConn);
diff --git metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index b0fa836..0435b5b 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -1537,8 +1537,11 @@ public ShowCompactResponse showCompact(ShowCompactRequest rqst) throws MetaExcep
           e.setEndTime(endTime);
         }
         e.setRunAs(rs.getString(9));
-        e.setHadoopJobId(rs.getString(10));
-        long id = rs.getLong(11);//for debugging
+        String jobId = rs.getString(10);
+        if(jobId != null && jobId.length() > 0) {
+          e.setHadoopJobId(jobId);
+        }
+        e.setId(rs.getLong(11));
         response.addToCompacts(e);
       }
       LOG.debug("Going to rollback");
@@ -1943,12 +1946,12 @@ protected void checkRetryable(Connection conn,
         }
         sendRetrySignal = true;
       } else {
-        LOG.error("Fatal error. Retry limit (" + retryLimit + ") reached. Last error: " + getMessage(e));
+        LOG.error("Fatal error in " + caller + ". Retry limit (" + retryLimit + ") reached. Last error: " + getMessage(e));
       }
     }
     else {
       //make sure we know we saw an error that we don't recognize
-      LOG.info("Non-retryable error: " + getMessage(e));
+      LOG.info("Non-retryable error in " + caller + " : " + getMessage(e));
     }
   }
   finally {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 0ac9053..9078173 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -2740,6 +2740,8 @@ private int showCompactions(Hive db, ShowCompactionsDesc desc) throws HiveExcept
     ShowCompactResponse rsp = db.showCompactions();
 
     // Write the results into the file
+    final String noVal = " --- ";
+
     DataOutputStream os = getOutputStream(desc.getResFile());
     try {
       // Write a header
@@ -2756,6 +2758,10 @@ private int showCompactions(Hive db, ShowCompactionsDesc desc) throws HiveExcept
       os.writeBytes("Worker");
       os.write(separator);
       os.writeBytes("Start Time");
+      os.write(separator);
+      os.writeBytes("Duration(ms)");
+      os.write(separator);
+      os.writeBytes("HadoopJobId");
       os.write(terminator);
 
       if (rsp.getCompacts() != null) {
@@ -2765,16 +2771,20 @@ private int showCompactions(Hive db, ShowCompactionsDesc desc) throws HiveExcept
           os.writeBytes(e.getTablename());
           os.write(separator);
           String part = e.getPartitionname();
-          os.writeBytes(part == null ? "NULL" : part);
+          os.writeBytes(part == null ? noVal : part);
           os.write(separator);
           os.writeBytes(e.getType().toString());
           os.write(separator);
           os.writeBytes(e.getState());
           os.write(separator);
           String wid = e.getWorkerid();
-          os.writeBytes(wid == null ? "NULL" : wid);
+          os.writeBytes(wid == null ? noVal : wid);
           os.write(separator);
           os.writeBytes(Long.toString(e.getStart()));
+          os.write(separator);
+          os.writeBytes(e.isSetEndTime() ? Long.toString(e.getEndTime() - e.getStart()) : noVal);
+          os.write(separator);
+          os.writeBytes(e.isSetHadoopJobId() ? e.getHadoopJobId() : noVal);
           os.write(terminator);
         }
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCompactionsDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCompactionsDesc.java
index 94fd289..dc47a38 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCompactionsDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCompactionsDesc.java
@@ -28,7 +28,7 @@
 
   private static final long serialVersionUID = 1L;
   private static final String schema = "dbname,tabname,partname,type,state,workerid," +
-      "starttime#string:string:string:string:string:string:string";
+      "starttime,duration,hadoopjobid#string:string:string:string:string:string:string:string:string";
 
   private String resFile;
 