diff --git a/metastore/bin/.gitignore b/metastore/bin/.gitignore
deleted file mode 100644
index 0dd9890..0000000
--- a/metastore/bin/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-# Dummy file to make Git recognize this empty directory
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
old mode 100644
new mode 100755
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 06d7595..c76a143 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -3710,6 +3710,48 @@ public boolean update_partition_column_statistics(ColumnStatistics colStats)
       } finally {
         endFunction("write_partition_column_statistics: ", ret != false, null, tableName);
       }
+    }
+
+    public boolean update_partition_column_statistics(
+        SetPartitionsStatsRequest request) throws NoSuchObjectException,
+        InvalidObjectException, MetaException, TException,
+        InvalidInputException {
+      Map<ColumnStatistics, List<String>> map = new HashMap<ColumnStatistics, List<String>>();
+      for (ColumnStatistics colStats : request.getColStats()) {
+        String dbName = null;
+        String tableName = null;
+        String partName = null;
+        String colName = null;
+        ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
+        dbName = statsDesc.getDbName().toLowerCase();
+        tableName = statsDesc.getTableName().toLowerCase();
+        partName = lowerCaseConvertPartName(statsDesc.getPartName());
+        statsDesc.setDbName(dbName);
+        statsDesc.setTableName(tableName);
+        statsDesc.setPartName(partName);
+        long time = System.currentTimeMillis() / 1000;
+        statsDesc.setLastAnalyzed(time);
+        List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj();
+        for (ColumnStatisticsObj statsObj : statsObjs) {
+          colName = statsObj.getColName().toLowerCase();
+          statsObj.setColName(colName);
+          startFunction("write_partition_column_statistics: db=" + dbName +
+              " table=" + tableName + " part=" + partName + "column=" +
+              colName);
+        }
+        colStats.setStatsDesc(statsDesc);
+        colStats.setStatsObj(statsObjs);
+        List<String> partVals = getPartValsFromName(getMS(), dbName, tableName,
+            partName);
+        map.put(colStats, partVals);
+      }
+      boolean ret = false;
+      try {
+        ret = getMS().updatePartitionColumnStatistics(map);
+        return ret;
+      } finally {
+        endFunction("write_partition_column_statistics: ", ret != false, null,
+            null);
+      }
     }
 
     @Override
@@ -5037,15 +5079,11 @@ public AggrStats get_aggr_stats_for(PartitionsStatsRequest request)
 
     public boolean set_aggr_stats_for(SetPartitionsStatsRequest request)
         throws NoSuchObjectException, InvalidObjectException, MetaException,
         InvalidInputException, TException {
-      boolean ret = true;
-      for (ColumnStatistics colStats : request.getColStats()) {
-        ret = ret && update_partition_column_statistics(colStats);
-      }
-      return ret;
+      return update_partition_column_statistics(request);
     }
-  }
+
 
   public static IHMSHandler newHMSHandler(String name, HiveConf hiveConf) throws MetaException {
     return newHMSHandler(name, hiveConf, false);
   }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 0693039..0ce4293 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -5761,6 +5761,45 @@ public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List<String> partVals)
       rollbackTransaction();
     }
   }
+
+  public boolean updatePartitionColumnStatistics(Map<ColumnStatistics, List<String>> map)
+      throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
+    boolean committed = false;
+
+    try {
+      openTransaction();
+      for (Entry<ColumnStatistics, List<String>> entry : map.entrySet()) {
+        ColumnStatistics colStats = entry.getKey();
+        List<String> partVals = entry.getValue();
+        List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj();
+        ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
+        Table table = ensureGetTable(statsDesc.getDbName(),
+            statsDesc.getTableName());
+        Partition partition = convertToPart(getMPartition(
+            statsDesc.getDbName(), statsDesc.getTableName(), partVals));
+        for (ColumnStatisticsObj statsObj : statsObjs) {
+          // We have to get partition again because DataNucleus
+          MPartition mPartition = getMPartition(statsDesc.getDbName(),
+              statsDesc.getTableName(), partVals);
+          if (partition == null) {
+            throw new NoSuchObjectException(
+                "Partition for which stats is gathered doesn't exist.");
+          }
+          MPartitionColumnStatistics mStatsObj = StatObjectConverter
+              .convertToMPartitionColumnStatistics(mPartition, statsDesc,
+                  statsObj);
+          writeMPartitionColumnStatistics(table, partition, mStatsObj);
+        }
+      }
+      committed = commitTransaction();
+      return committed;
+    } finally {
+      if (!committed) {
+        rollbackTransaction();
+      }
+    }
+  }
+
   private List<MTableColumnStatistics> getMTableColumnStatistics(
       Table table, List<String> colNames) throws MetaException {
     boolean committed = false;
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
index e435d69..a19b5a5 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -551,4 +551,8 @@ public void dropFunction(String dbName, String funcName)
   public AggrStats get_aggr_stats_for(String dbName, String tblName,
     List<String> partNames, List<String> colNames)
     throws MetaException, NoSuchObjectException;
+
+  boolean updatePartitionColumnStatistics(
+      Map<ColumnStatistics, List<String>> map) throws NoSuchObjectException,
+      MetaException, InvalidObjectException, InvalidInputException;
 }
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
index 3847d99..3b0082d 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
@@ -719,5 +719,12 @@ public AggrStats get_aggr_stats_for(String dbName,
     return null;
   }
 
+  @Override
+  public boolean updatePartitionColumnStatistics(
+      Map<ColumnStatistics, List<String>> map) throws NoSuchObjectException,
+      MetaException, InvalidObjectException, InvalidInputException {
+    return objectStore.updatePartitionColumnStatistics(map);
+  }
+
 }
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index 981fa1a..6864d90 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -735,6 +735,13 @@ public AggrStats get_aggr_stats_for(String dbName,
     throws MetaException {
     return null;
   }
+
+  @Override
+  public boolean updatePartitionColumnStatistics(
+      Map<ColumnStatistics, List<String>> map) throws NoSuchObjectException,
+      MetaException, InvalidObjectException, InvalidInputException {
+    return false;
+  }
 }
diff --git a/ql/.gitignore b/ql/.gitignore
deleted file mode 100644
index 916e17c..0000000
--- a/ql/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-dependency-reduced-pom.xml
diff --git a/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote b/service/src/gen/thrift/gen-py/TCLIService/TCLIService-remote
old mode 100644
new mode 100755
diff --git a/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote b/service/src/gen/thrift/gen-py/hive_service/ThriftHive-remote
old mode 100644
new mode 100755