diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
index e1e45bc..dafc419 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreFsImpl.java
@@ -25,8 +25,9 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 
 public class HiveMetaStoreFsImpl implements MetaStoreFS {
 
@@ -37,16 +38,10 @@ public class HiveMetaStoreFsImpl implements MetaStoreFS {
   public boolean deleteDir(FileSystem fs, Path f, boolean recursive,
       Configuration conf) throws MetaException {
     LOG.info("deleting " + f);
-
-    // older versions of Hadoop don't have a Trash constructor based on the
-    // Path or FileSystem. So need to achieve this by creating a dummy conf.
-    // this needs to be filtered out based on version
-    Configuration dupConf = new Configuration(conf);
-    FileSystem.setDefaultUri(dupConf, fs.getUri());
+    HadoopShims hadoopShim = ShimLoader.getHadoopShims();
 
     try {
-      Trash trashTmp = new Trash(dupConf);
-      if (trashTmp.moveToTrash(f)) {
+      if (hadoopShim.moveToAppropriateTrash(fs, f, conf)) {
         LOG.info("Moved to trash: " + f);
         return true;
       }
diff --git a/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java b/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index 02ac55a..668d283 100644
--- a/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ b/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.io.Text;
@@ -624,4 +625,17 @@ public class Hadoop20Shims implements HadoopShims {
   public String getJobLauncherHttpAddress(Configuration conf) {
     return conf.get("mapred.job.tracker.http.address");
   }
+
+  @Override
+  public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+      throws IOException {
+    // Older versions of Hadoop don't have a Trash constructor that takes a
+    // Path or FileSystem, so emulate it here: point a duplicate conf at the
+    // URI of the FileSystem that owns the path being deleted.
+
+    Configuration dupConf = new Configuration(conf);
+    FileSystem.setDefaultUri(dupConf, fs.getUri());
+    Trash trash = new Trash(dupConf);
+    return trash.moveToTrash(path);
+  }
 }
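Note on the 0.20 implementation above: the only `Trash` constructor available on that branch takes just a `Configuration`, and it resolves the trash directory against the default filesystem, which is why the shim copies the conf and repoints its default URI. A minimal standalone sketch of the same trick, assuming a Hadoop 0.20-era classpath; the class name, method name, and path are hypothetical and not part of the patch:

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

// Hypothetical helper illustrating the dummy-conf emulation in Hadoop20Shims.
public final class TrashCompatSketch {
  public static boolean moveToTrashOn(FileSystem fs, Path path, Configuration conf)
      throws IOException {
    // Copy the conf so the caller's default-filesystem setting stays untouched.
    Configuration dupConf = new Configuration(conf);
    // Trash(Configuration) resolves the trash dir on the default filesystem,
    // so point the copy at the filesystem that actually owns the path.
    FileSystem.setDefaultUri(dupConf, fs.getUri());
    // moveToTrash returns false when trash is disabled or the path is
    // already inside the trash directory.
    return new Trash(dupConf).moveToTrash(path);
  }
}
```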
diff --git a/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java b/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
index 7d04f57..0ba3672 100644
--- a/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ b/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
@@ -17,10 +17,14 @@
  */
 package org.apache.hadoop.hive.shims;
 
+import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobConf;
@@ -93,4 +97,17 @@ public class Hadoop20SShims extends HadoopShimsSecure {
   public String getJobLauncherHttpAddress(Configuration conf) {
     return conf.get("mapred.job.tracker.http.address");
   }
+
+  @Override
+  public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+      throws IOException {
+    // Older versions of Hadoop don't have a Trash constructor that takes a
+    // Path or FileSystem, so emulate it here: point a duplicate conf at the
+    // URI of the FileSystem that owns the path being deleted.
+
+    Configuration dupConf = new Configuration(conf);
+    FileSystem.setDefaultUri(dupConf, fs.getUri());
+    Trash trash = new Trash(dupConf);
+    return trash.moveToTrash(path);
+  }
 }
diff --git a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index fdd16dd..b36f08f 100644
--- a/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -17,11 +17,15 @@
  */
 package org.apache.hadoop.hive.shims;
 
+import java.io.IOException;
 import java.lang.Integer;
 import java.net.MalformedURLException;
 import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
 import org.apache.hadoop.hive.shims.HadoopShimsSecure;
 import org.apache.hadoop.mapred.ClusterStatus;
@@ -113,4 +117,9 @@ public class Hadoop23Shims extends HadoopShimsSecure {
     return conf.get("yarn.resourcemanager.webapp.address");
   }
 
+  @Override
+  public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+      throws IOException {
+    return Trash.moveToAppropriateTrash(fs, path, conf);
+  }
 }
diff --git a/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java b/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
index 9959aca..89f9b4d 100644
--- a/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
+++ b/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
@@ -576,4 +576,8 @@ public abstract class HadoopShimsSecure implements HadoopShims {
 
   @Override
   abstract public String getJobLauncherRpcAddress(Configuration conf);
+
+  @Override
+  abstract public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+      throws IOException;
 }
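On 0.23 the shim can delegate in one line, because Hadoop 0.23 ships the static `Trash.moveToAppropriateTrash(FileSystem, Path, Configuration)` that resolves the trash directory on the volume actually holding the path. A hedged demo against the local filesystem, assuming a 0.23+ classpath; the test path is hypothetical, and trash must be enabled (`fs.trash.interval` greater than 0) for the move to happen:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

// Illustrative demo only; not part of the patch.
public class TrashDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setLong("fs.trash.interval", 60);      // minutes; 0 disables trash
    FileSystem fs = FileSystem.getLocal(conf);

    Path p = new Path("/tmp/trash-demo-file");  // hypothetical test file
    fs.create(p).close();

    // Moves p into the trash of the volume holding it; this is the call the
    // dummy-conf trick emulates on pre-0.23 versions.
    boolean moved = Trash.moveToAppropriateTrash(fs, p, conf);
    System.out.println("moved to trash: " + moved);
  }
}
```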
diff --git a/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
index e7d6619..bc3b058 100644
--- a/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ b/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -289,10 +289,23 @@ public interface HadoopShims {
    */
  public String getJobLauncherHttpAddress(Configuration conf);
 
+
   /**
-   * InputSplitShim.
-   *
+   * Move the directory/file to trash. In the case of symlinks or mount points, the file
+   * is moved to the trash bin of the actual volume containing the path being deleted.
+   * @param fs the FileSystem that owns the path
+   * @param path the file or directory to move to trash
+   * @param conf the Configuration supplying the trash settings
+   * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException
    */
+  public boolean moveToAppropriateTrash(FileSystem fs, Path path, Configuration conf)
+      throws IOException;
+
+  /**
+   * InputSplitShim.
+   *
+   */
   public interface InputSplitShim extends InputSplit {
     JobConf getJob();
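The interface javadoc above pins down the contract callers rely on: a `false` return means the item was not moved (trash disabled, or already in the trash), not that the call failed. That is why `HiveMetaStoreFsImpl.deleteDir()` only returns early on `true` and otherwise falls through. A caller-side sketch of that pattern; the fallback `fs.delete` and the path are illustrative assumptions, not taken verbatim from the patch:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;

// Illustrative caller; not part of the patch.
public class ShimTrashExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path doomed = new Path("/tmp/hive-example-dir"); // hypothetical path

    // ShimLoader picks the 0.20 / 0.20S / 0.23 implementation at runtime.
    HadoopShims shims = ShimLoader.getHadoopShims();
    if (shims.moveToAppropriateTrash(fs, doomed, conf)) {
      System.out.println("Moved to trash: " + doomed);
    } else if (fs.delete(doomed, true)) { // trash declined: delete outright
      System.out.println("Deleted: " + doomed);
    }
  }
}
```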