diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
index 5e317ab..3de4b83 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
@@ -27,11 +27,13 @@
 import java.sql.SQLIntegrityConstraintViolationException;
 import java.sql.SQLRecoverableException;
 import java.sql.Statement;
+import java.sql.Types;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -155,7 +157,13 @@ public Void run(PreparedStatement stmt) throws SQLException {
       try {
         insStmt.setString(1, fileID);
         for (int i = 0; i < JDBCStatsUtils.getSupportedStatistics().size(); i++) {
-          insStmt.setString(i + 2, stats.get(supportedStatistics.get(i)));
+          String valLong = StringUtils.trim(stats.get(supportedStatistics.get(i)));
+          LOG.debug("Publishing stat [" + supportedStatistics.get(i) + "] [" + valLong + "]");
+          if (StringUtils.isNotBlank(valLong)) {
+            insStmt.setLong(i + 2, Long.parseLong(valLong));
+          } else {
+            insStmt.setNull(i + 2, Types.BIGINT);
+          }
         }
         Utilities.executeWithRetry(execUpdate, insStmt, waitWindow, maxRetries);
         return true;
@@ -279,7 +287,7 @@ public boolean init(Configuration hconf) {
 
       // Check if the table exists
      DatabaseMetaData dbm = conn.getMetaData();
-      rs = dbm.getTables(null, null, JDBCStatsUtils.getStatTableName(), null);
+      rs = dbm.getTables(null, null, JDBCStatsUtils.getStatTableName().toLowerCase(), null);
       boolean tblExists = rs.next();
       if (!tblExists) { // Table does not exist, create it
         String createTable = JDBCStatsUtils.getCreate("");
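
Note on the publishStat() change above: the loop now binds each statistic as a numeric value instead of a raw string, and blank or missing stats become SQL NULL rather than empty strings. The following is a minimal, standalone sketch of that binding logic only; the class and helper name (StatValueBinder.bindStatValue) are hypothetical and simply mirror the patched loop body, assuming the same commons-lang StringUtils that the patch imports.

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;

import org.apache.commons.lang.StringUtils;

// Hypothetical helper mirroring the patched loop body in publishStat():
// trim the raw stat string, bind it as a BIGINT when present, otherwise
// bind SQL NULL so blank or missing statistics never reach Long.parseLong().
public final class StatValueBinder {

  private StatValueBinder() {
  }

  static void bindStatValue(PreparedStatement stmt, int paramIndex, String rawValue)
      throws SQLException {
    String trimmed = StringUtils.trim(rawValue);
    if (StringUtils.isNotBlank(trimmed)) {
      stmt.setLong(paramIndex, Long.parseLong(trimmed)); // numeric stat, e.g. row count
    } else {
      stmt.setNull(paramIndex, Types.BIGINT);            // absent stat -> NULL, not ""
    }
  }
}

As in the patch, a non-blank value that is not a well-formed long would still throw NumberFormatException here; the change assumes published stats are either blank or valid longs.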
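
Note on the init() change above: DatabaseMetaData.getTables() matches the stored identifier, and databases fold unquoted identifiers differently (PostgreSQL to lower case, Derby and Oracle to upper case), so lower-casing the stats table name only helps on lower-folding databases. The sketch below shows a more defensive existence check that probes both spellings; StatsTableCheck.statsTableExists is a hypothetical helper for illustration and is not part of the patch.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

// Hypothetical, more portable existence check: try the lower-case spelling first
// (as the patch does), then fall back to the upper-case spelling before concluding
// that the stats table is missing.
public final class StatsTableCheck {

  private StatsTableCheck() {
  }

  static boolean statsTableExists(Connection conn, String tableName) throws SQLException {
    DatabaseMetaData dbm = conn.getMetaData();
    try (ResultSet rs = dbm.getTables(null, null, tableName.toLowerCase(), null)) {
      if (rs.next()) {
        return true;
      }
    }
    try (ResultSet rs = dbm.getTables(null, null, tableName.toUpperCase(), null)) {
      return rs.next();
    }
  }
}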