diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 2fe86e1..ca9bd02 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -91,6 +91,7 @@
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.thrift.TException;
@@ -2327,7 +2328,9 @@ static protected void replaceFiles(Path srcf, Path destf, Path oldPath, HiveConf
         // use FsShell to move data to .Trash first rather than delete permanently
         FsShell fshell = new FsShell();
         fshell.setConf(conf);
-        fshell.run(new String[]{"-rmr", oldPath.toString()});
+        String[] rmr = isHadoop1() ? new String[]{"-rmr", oldPath.toString()} :
+            new String[]{"-rm", "-r", oldPath.toString()};
+        fshell.run(rmr);
       }
     } catch (Exception e) {
       //swallow the exception
@@ -2372,6 +2375,10 @@
     }
   }
 
+  public static boolean isHadoop1() {
+    return ShimLoader.getMajorVersion().startsWith("0.20");
+  }
+
   public void exchangeTablePartitions(Map partitionSpecs, String sourceDb, String sourceTable,
       String destDb, String destinationTableName) throws HiveException {