diff --git common/src/java/org/apache/hadoop/hive/common/FileUtils.java common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index 0607913..dff042a 100644
--- common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -650,18 +650,28 @@ public static void checkDeletePermission(Path path, Configuration conf, String u
     // use case. super users can also do a chown to be able to drop the file
 
     final FileSystem fs = path.getFileSystem(conf);
-    if (!fs.exists(path)) {
+    // check user has write permissions on the parent dir
+    FileStatus stat = null;
+    try {
+      stat = fs.getFileStatus(path);
+    } catch (FileNotFoundException e) {
+      // ignore
+    }
+    if (stat == null) {
       // no file/dir to be deleted
       return;
     }
-    Path parPath = path.getParent();
-    // check user has write permissions on the parent dir
-    FileStatus stat = fs.getFileStatus(path);
     FileUtils.checkFileAccessWithImpersonation(fs, stat, FsAction.WRITE, user);
 
+    HadoopShims shims = ShimLoader.getHadoopShims();
+    if (!shims.supportStickyBit()) {
+      // sticky bit not supported by this Hadoop version
+      return;
+    }
+
     // check if sticky bit is set on the parent dir
-    FileStatus parStatus = fs.getFileStatus(parPath);
-    if (!parStatus.getPermission().getStickyBit()) {
+    FileStatus parStatus = fs.getFileStatus(path.getParent());
+    if (!shims.hasStickyBit(parStatus.getPermission())) {
       // no sticky bit, so write permission on parent dir is sufficient
       // no further checks needed
       return;
diff --git shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
index aec6796..faae060 100644
--- shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
+++ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
@@ -55,6 +55,7 @@
 import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.io.LongWritable;
@@ -895,4 +896,14 @@ public String getPassword(Configuration conf, String name) {
     // No password API, just retrieve value from conf
     return conf.get(name);
   }
+
+  @Override
+  public boolean supportStickyBit() {
+    return false;
+  }
+
+  @Override
+  public boolean hasStickyBit(FsPermission permission) {
+    return false; // not supported
+  }
 }
diff --git shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
index 8bc871ac..a809eb1 100644
--- shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
+++ shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.ProxyFileSystem;
 import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.ClusterStatus;
@@ -513,4 +514,14 @@ public String getPassword(Configuration conf, String name) {
     // No password API, just retrieve value from conf
     return conf.get(name);
   }
+
+  @Override
+  public boolean supportStickyBit() {
+    return false;
+  }
+
+  @Override
+  public boolean hasStickyBit(FsPermission permission) {
+    return false; // not supported
+  }
 }
diff --git shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index 6f4820e..262477e 100644
--- shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -19,21 +19,17 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.security.AccessControlException;
-import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
@@ -816,4 +812,14 @@ public String getPassword(Configuration conf, String name) throws IOException {
       }
     }
   }
+
+  @Override
+  public boolean supportStickyBit() {
+    return true;
+  }
+
+  @Override
+  public boolean hasStickyBit(FsPermission permission) {
+    return permission.getStickyBit();
+  }
 }
diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index eed4f5b..964c38d 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -44,6 +44,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.InputSplit;
@@ -694,4 +695,16 @@ public void checkFileAccess(FileSystem fs, FileStatus status, FsAction action)
    */
   public String getPassword(Configuration conf, String name) throws IOException;
 
+  /**
+   * Check whether the current Hadoop version supports the sticky bit.
+   * @return true if the sticky bit is supported
+   */
+  boolean supportStickyBit();
+
+  /**
+   * Check whether the sticky bit is set in the given permission.
+   * @param permission the permission to inspect
+   * @return true if the sticky bit is set
+   */
+  boolean hasStickyBit(FsPermission permission);
 }
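
For reviewers, a minimal usage sketch (not part of the patch): it shows how callers such as FileUtils.checkDeletePermission are expected to combine the two new shim methods. The class and method names in the sketch (StickyBitCheckExample, parentHasStickyBit) are illustrative only; the shim calls mirror the FileUtils hunk above.

// Usage sketch only -- not part of the patch. Class/method names are illustrative.
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.shims.HadoopShims;
import org.apache.hadoop.hive.shims.ShimLoader;

public class StickyBitCheckExample {
  // Returns true only when the running Hadoop version exposes the sticky bit
  // (the 0.23 shims) and the parent directory of the given path actually has it set.
  static boolean parentHasStickyBit(FileSystem fs, Path path) throws Exception {
    HadoopShims shims = ShimLoader.getHadoopShims();
    if (!shims.supportStickyBit()) {
      // Hadoop 0.20 / 0.20S shims report no sticky bit support
      return false;
    }
    FileStatus parStatus = fs.getFileStatus(path.getParent());
    // Hadoop23Shims delegates to FsPermission.getStickyBit()
    return shims.hasStickyBit(parStatus.getPermission());
  }
}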