diff --git metastore/src/java/org/apache/hadoop/hive/metastore/TransactionalValidationListener.java metastore/src/java/org/apache/hadoop/hive/metastore/TransactionalValidationListener.java
index 78c0eb9..bd85aef 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/TransactionalValidationListener.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/TransactionalValidationListener.java
@@ -18,10 +18,10 @@
 package org.apache.hadoop.hive.metastore;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
@@ -31,6 +31,8 @@
 
 import java.io.IOException;
 import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
@@ -90,7 +92,14 @@ private void handleAlterTableTransactionalProp(PreAlterTableEvent context) throw
       //normalize prop name
       parameters.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, transactionalValue);
     }
-    if ("true".equalsIgnoreCase(transactionalValue)) {
+    Table oldTable = context.getOldTable();
+    String oldTransactionalValue = null;
+    for (String key : oldTable.getParameters().keySet()) {
+      if (hive_metastoreConstants.TABLE_IS_TRANSACTIONAL.equalsIgnoreCase(key)) {
+        oldTransactionalValue = oldTable.getParameters().get(key);
+      }
+    }
+    if ("true".equalsIgnoreCase(transactionalValue) && !"true".equalsIgnoreCase(oldTransactionalValue)) {
       if (!conformToAcid(newTable)) {
         throw new MetaException("The table must be bucketed and stored using an ACID compliant" +
             " format (such as ORC)");
@@ -108,13 +117,6 @@ private void handleAlterTableTransactionalProp(PreAlterTableEvent context) throw
       return;
     }
 
-    Table oldTable = context.getOldTable();
-    String oldTransactionalValue = null;
-    for (String key : oldTable.getParameters().keySet()) {
-      if (hive_metastoreConstants.TABLE_IS_TRANSACTIONAL.equalsIgnoreCase(key)) {
-        oldTransactionalValue = oldTable.getParameters().get(key);
-      }
-    }
     if (oldTransactionalValue == null ?
         transactionalValue == null : oldTransactionalValue.equalsIgnoreCase(transactionalValue)) {
       //this covers backward compat cases where this prop may have been set already
@@ -222,19 +224,24 @@ boolean containsCopyNFiles(RawStore ms, Table table) throws MetaException {
         tablePath = wh.getDnsPath(new Path(table.getSd().getLocation()));
       }
       FileSystem fs = wh.getFs(tablePath);
-      RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(tablePath, true);
-      while (iterator.hasNext()) {
-        LocatedFileStatus fileStatus = iterator.next();
+      List<FileStatus> fileStatuses = new LinkedList<>();
+      // FileUtils.listStatusRecursively() will omit hidden files
+      FileUtils.listStatusRecursively(fs, fs.getFileStatus(tablePath), fileStatuses);
+      for (FileStatus fileStatus : fileStatuses) {
         if (COPY_N_PATTERN.matcher(fileStatus.getPath().getName()).matches()) {
           return true;
         }
       }
     } catch (IOException e) {
-      throw new MetaException("Unable to list files for " + table.getDbName() + "."+
-          table.getTableName());
+      String errorMessage = "Unable to list files for " + table.getDbName() + "."+
+          table.getTableName();
+      LOG.error("IOException during listing copyNFiles: ", e);
+      throw new MetaException(errorMessage);
     } catch (NoSuchObjectException e) {
-      throw new MetaException("Unable to get location for " + table.getDbName() + "."+
-          table.getTableName());
+      String errorMessage = "Unable to get location for " + table.getDbName() + "."+
+          table.getTableName();
+      LOG.error(errorMessage, e);
+      throw new MetaException(errorMessage);
     }
     return false;
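
For reference, a minimal standalone sketch (not part of the patch) of the listing approach the new containsCopyNFiles() code switches to: FileUtils.listStatusRecursively() walks the table location recursively while omitting hidden files, and each returned FileStatus name is checked against the copy_N pattern. The class name, table path, and regex below are illustrative assumptions; the real COPY_N_PATTERN constant is defined elsewhere in TransactionalValidationListener and does not appear in this diff.

// Standalone sketch -- not part of the patch. Assumes hadoop-common and hive-common on the
// classpath and a local directory standing in for the table location.
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;

public class CopyNScanSketch {
  // Hypothetical stand-in for the class's COPY_N_PATTERN constant.
  private static final Pattern COPY_N_PATTERN = Pattern.compile(".*_copy_[0-9]+");

  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path tablePath = new Path("/tmp/warehouse/some_table");  // illustrative table location

    // Same listing shape as the patched containsCopyNFiles(): recursive, hidden files omitted.
    List<FileStatus> fileStatuses = new LinkedList<>();
    FileUtils.listStatusRecursively(fs, fs.getFileStatus(tablePath), fileStatuses);

    boolean found = false;
    for (FileStatus fileStatus : fileStatuses) {
      // A bucket file named like 000000_0_copy_1 matches here.
      if (COPY_N_PATTERN.matcher(fileStatus.getPath().getName()).matches()) {
        found = true;  // the patched containsCopyNFiles() returns true in this case
        break;
      }
    }
    System.out.println("copy_N file present under " + tablePath + ": " + found);
  }
}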