commit c63c5e5c1f96a60b8278b81c883a84175f021727 Author: Chris Drome Date: Fri Jun 10 00:50:24 2016 +0000 HIVE-13989: Extended ACLs are not handled according to specification diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java index 9db3dc1..7b50afd 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java @@ -32,6 +32,10 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.AclEntry; +import org.apache.hadoop.fs.permission.AclEntryScope; +import org.apache.hadoop.fs.permission.AclEntryType; +import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; @@ -331,7 +335,7 @@ private Partition constructPartition( String partLocnRoot, String dynPartPath, Map partKVs, HCatSchema outputSchema, Map params, Table table, FileSystem fs, - String grpName, FsPermission perms) throws IOException { + String grpName, FsPermission perms, List acls) throws IOException { Partition partition = new Partition(); partition.setDbName(table.getDbName()); @@ -368,7 +372,7 @@ private Partition constructPartition( for (FieldSchema partKey : table.getPartitionKeys()) { if (i++ != 0) { fs.mkdirs(partPath); // Attempt to make the path in case it does not exist before we check - applyGroupAndPerms(fs, partPath, perms, grpName, false); + applyGroupAndPerms(fs, partPath, perms, acls, grpName, false); } partPath = constructPartialPartPath(partPath, partKey.getName().toLowerCase(), partKVs); } @@ -378,7 +382,7 @@ private Partition constructPartition( // Need not bother 
in case of HDFS as permission is taken care of by setting UMask fs.mkdirs(partPath); // Attempt to make the path in case it does not exist before we check if (!ShimLoader.getHadoopShims().getHCatShim().isFileInHDFS(fs, partPath)) { - applyGroupAndPerms(fs, partPath, perms, grpName, true); + applyGroupAndPerms(fs, partPath, perms, acls, grpName, true); } // Set the location in the StorageDescriptor @@ -398,7 +402,7 @@ private Partition constructPartition( } private void applyGroupAndPerms(FileSystem fs, Path dir, FsPermission permission, - String group, boolean recursive) + List acls, String group, boolean recursive) throws IOException { if(LOG.isDebugEnabled()) { LOG.debug("applyGroupAndPerms : " + dir + @@ -406,10 +410,13 @@ private void applyGroupAndPerms(FileSystem fs, Path dir, FsPermission permission " group: " + group + " recursive: " + recursive); } fs.setPermission(dir, permission); + if ((acls != null) && (acls.size() > 0)) { + fs.setAcl(dir, acls); + } if (recursive) { for (FileStatus fileStatus : fs.listStatus(dir)) { if (fileStatus.isDir()) { - applyGroupAndPerms(fs, fileStatus.getPath(), permission, group, true); + applyGroupAndPerms(fs, fileStatus.getPath(), permission, acls, group, true); } else { fs.setPermission(fileStatus.getPath(), permission); } @@ -786,6 +793,18 @@ private void registerPartitions(JobContext context) throws IOException{ FileStatus tblStat = fs.getFileStatus(tblPath); String grpName = tblStat.getGroup(); FsPermission perms = tblStat.getPermission(); + ArrayList acls = null; + + if (conf.getBoolean("dfs.namenode.acls.enabled", false)) { + try { + AclStatus stat = fs.getAclStatus(tblPath); + if (hasExtendedAcls(stat)) { + acls = getDefaultAclEntries(stat, perms); + } + } catch (UnsupportedOperationException e) { + LOG.debug("Skipping ACLs", e); + } + } List partitionsToAdd = new ArrayList(); if (!dynamicPartitioningUsed){ @@ -795,7 +814,7 @@ private void registerPartitions(JobContext context) throws IOException{ tblPath.toString(), 
null, jobInfo.getPartitionValues() ,jobInfo.getOutputSchema(), getStorerParameterMap(storer) ,table, fs - ,grpName,perms)); + ,grpName,perms, acls)); }else{ for (Entry> entry : partitionsDiscoveredByPath.entrySet()){ partitionsToAdd.add( @@ -805,7 +824,7 @@ private void registerPartitions(JobContext context) throws IOException{ ,entry.getKey(), entry.getValue() ,jobInfo.getOutputSchema(), getStorerParameterMap(storer) ,table, fs - ,grpName,perms)); + ,grpName,perms, acls)); } } @@ -941,7 +960,7 @@ private void registerPartitions(JobContext context) throws IOException{ // Set permissions appropriately for each of the partitions we just created // so as to have their permissions mimic the table permissions for (Partition p : partitionsAdded){ - applyGroupAndPerms(fs,new Path(p.getSd().getLocation()),tblStat.getPermission(),tblStat.getGroup(),true); + applyGroupAndPerms(fs,new Path(p.getSd().getLocation()),tblStat.getPermission(),acls,tblStat.getGroup(),true); } } @@ -1015,5 +1034,80 @@ private void cancelDelegationTokens(JobContext context) throws IOException{ } } + private ArrayList getDefaultAclEntries(AclStatus stat, FsPermission perms) { + ArrayList defaults = new ArrayList(); + + boolean[] hasDefaults = { false, false, false, false }; + + for (AclEntry e : stat.getEntries()) { + if (e.getScope() == AclEntryScope.DEFAULT) { + AclEntry acl = new AclEntry.Builder().setName(e.getName()).setScope(AclEntryScope.ACCESS) + .setType(e.getType()).setPermission(e.getPermission()).build(); + + defaults.add(acl); + defaults.add(e); + + if (e.getName() == null) { + if (e.getType() == AclEntryType.USER) { + hasDefaults[0] = true; + } + if (e.getType() == AclEntryType.GROUP) { + hasDefaults[1] = true; + } + if (e.getType() == AclEntryType.OTHER) { + hasDefaults[2] = true; + } + if (e.getType() == AclEntryType.MASK) { + hasDefaults[3] = true; + } + } + } + } + + if (! 
hasDefaults[0]) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.USER).setPermission(perms.getUserAction()).build(); + defaults.add(acl); + hasDefaults[0] = true; + } + + if (! hasDefaults[1]) { + for (AclEntry e : stat.getEntries()) { + if ((e.getScope() == AclEntryScope.ACCESS) && (e.getType() == AclEntryType.GROUP) && (e.getName() == null)) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.GROUP).setPermission(e.getPermission()).build(); + defaults.add(acl); + + hasDefaults[1] = true; + } + } + } + + if (! hasDefaults[2]) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.OTHER).setPermission(perms.getOtherAction()).build(); + defaults.add(acl); + + hasDefaults[2] = true; + } + + if (! hasDefaults[3]) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.MASK).setPermission(perms.getGroupAction()).build(); + defaults.add(acl); + + hasDefaults[3] = true; + } + + return defaults; + } + + private boolean hasExtendedAcls(AclStatus status) { + if (status != null) { + return status.getEntries().size() > 0; + } + + return false; + } } diff --git itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java index b798379..fbc8f14 100644 --- itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java +++ itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/ql/security/TestExtendedAcls.java @@ -18,12 +18,14 @@ package org.apache.hadoop.hive.ql.security; import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS; +import static org.apache.hadoop.fs.permission.AclEntryScope.DEFAULT; import static org.apache.hadoop.fs.permission.AclEntryType.GROUP; import static org.apache.hadoop.fs.permission.AclEntryType.OTHER; 
import static org.apache.hadoop.fs.permission.AclEntryType.USER; import java.util.List; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntryScope; @@ -56,6 +58,19 @@ public static void setup() throws Exception { aclEntry(ACCESS, USER, "bar", FsAction.READ_WRITE), aclEntry(ACCESS, USER, "foo", FsAction.READ_EXECUTE), aclEntry(ACCESS, GROUP, "bar", FsAction.READ_WRITE), + aclEntry(ACCESS, GROUP, "foo", FsAction.READ_EXECUTE), + aclEntry(DEFAULT, USER, "bar", FsAction.READ_WRITE), + aclEntry(DEFAULT, USER, "foo", FsAction.READ_EXECUTE), + aclEntry(DEFAULT, GROUP, "bar", FsAction.READ_WRITE), + aclEntry(DEFAULT, GROUP, "foo", FsAction.READ_EXECUTE)); + + private final ImmutableList aclSpec1ForFile = ImmutableList.of( + aclEntry(ACCESS, USER, FsAction.ALL), + aclEntry(ACCESS, GROUP, FsAction.ALL), + aclEntry(ACCESS, OTHER, FsAction.ALL), + aclEntry(ACCESS, USER, "bar", FsAction.READ_WRITE), + aclEntry(ACCESS, USER, "foo", FsAction.READ_EXECUTE), + aclEntry(ACCESS, GROUP, "bar", FsAction.READ_WRITE), aclEntry(ACCESS, GROUP, "foo", FsAction.READ_EXECUTE)); private final ImmutableList aclSpec2 = ImmutableList.of( @@ -65,16 +80,29 @@ public static void setup() throws Exception { aclEntry(ACCESS, USER, "bar2", FsAction.READ_WRITE), aclEntry(ACCESS, USER, "foo2", FsAction.READ_EXECUTE), aclEntry(ACCESS, GROUP, "bar2", FsAction.READ), + aclEntry(ACCESS, GROUP, "foo2", FsAction.READ_EXECUTE), + aclEntry(DEFAULT, USER, "bar2", FsAction.READ_WRITE), + aclEntry(DEFAULT, USER, "foo2", FsAction.READ_EXECUTE), + aclEntry(DEFAULT, GROUP, "bar2", FsAction.READ), + aclEntry(DEFAULT, GROUP, "foo2", FsAction.READ_EXECUTE)); + + private final ImmutableList aclSpec2ForFile = ImmutableList.of( + aclEntry(ACCESS, USER, FsAction.ALL), + aclEntry(ACCESS, GROUP, FsAction.ALL), + aclEntry(ACCESS, OTHER, FsAction.READ_EXECUTE), + aclEntry(ACCESS, USER, "bar2", 
FsAction.READ_WRITE), + aclEntry(ACCESS, USER, "foo2", FsAction.READ_EXECUTE), + aclEntry(ACCESS, GROUP, "bar2", FsAction.READ), aclEntry(ACCESS, GROUP, "foo2", FsAction.READ_EXECUTE)); @Override - public void setPermission(String locn, int permIndex) throws Exception { + public void setPermission(String locn, int permIndex, boolean isFile) throws Exception { switch (permIndex) { case 0: - setAcl(locn, aclSpec1); + setAcl(locn, isFile ? aclSpec1ForFile : aclSpec1, isFile); break; case 1: - setAcl(locn, aclSpec2); + setAcl(locn, isFile ? aclSpec2ForFile : aclSpec2, isFile); break; default: throw new RuntimeException("Only 2 permissions by this test"); @@ -82,21 +110,27 @@ public void setPermission(String locn, int permIndex) throws Exception { } @Override + public void setPermission(String locn, int permIndex) throws Exception { + setPermission(locn, permIndex, false); + } + + @Override public void verifyPermission(String locn, int permIndex) throws Exception { + FileStatus fstat = fs.getFileStatus(new Path(locn)); + FsPermission perm = fstat.getPermission(); + switch (permIndex) { case 0: - FsPermission perm = fs.getFileStatus(new Path(locn)).getPermission(); Assert.assertEquals("Location: " + locn, "rwxrwxrwx", String.valueOf(perm)); List actual = getAcl(locn); - verifyAcls(aclSpec1, actual); + verifyAcls(aclSpec1, actual, fstat.isFile()); break; case 1: - perm = fs.getFileStatus(new Path(locn)).getPermission(); Assert.assertEquals("Location: " + locn, "rwxrwxr-x", String.valueOf(perm)); List acls = getAcl(locn); - verifyAcls(aclSpec2, acls); + verifyAcls(aclSpec2, acls, fstat.isFile()); break; default: throw new RuntimeException("Only 2 permissions by this test: " + permIndex); @@ -139,9 +173,14 @@ private AclEntry aclEntry(AclEntryScope scope, AclEntryType type, .setPermission(permission).build(); } - private void verifyAcls(List expectedList, List actualList) { + private void verifyAcls(List expectedList, List actualList, boolean isFile) { for (AclEntry 
expected : expectedList) { if (expected.getName() != null) { + if (isFile && expected.getScope() == DEFAULT) { + // Files will not inherit default extended ACL rules from its parent, so ignore them. + continue; + } + //the non-named acl's are coming as regular permission, and not as aclEntries. boolean found = false; for (AclEntry actual : actualList) { @@ -156,7 +195,7 @@ private void verifyAcls(List expectedList, List actualList) } } - private void setAcl(String locn, List aclSpec) throws Exception { + private void setAcl(String locn, List aclSpec, boolean isFile) throws Exception { fs.setAcl(new Path(locn), aclSpec); } diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java index 2ae9cc0..8a69107 100644 --- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java +++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/FolderPermissionBase.java @@ -65,6 +65,9 @@ public boolean accept(Path p){ public abstract void verifyPermission(String locn, int permIndex) throws Exception; + public void setPermission(String locn, int permIndex, boolean isFile) throws Exception { + setPermission(locn, permIndex); + } public void setPermission(String locn) throws Exception { setPermission(locn, 0); @@ -465,7 +468,7 @@ public void testLoadLocal() throws Exception { //case1B: load data local into overwrite non-partitioned-table setPermission(warehouseDir + "/" + tableName, 1); for (String child : listStatus(tableLoc)) { - setPermission(child, 1); + setPermission(child, 1, true); } ret = driver.run("load data local inpath '" + dataFilePath + "' overwrite into table " + tableName); Assert.assertEquals(0,ret.getResponseCode()); @@ -499,7 +502,7 @@ public void testLoadLocal() throws Exception { setPermission(tableLoc, 1); setPermission(partLoc, 1); for (String child : listStatus(partLoc)) { - 
setPermission(child, 1); + setPermission(child, 1, true); } ret = driver.run("LOAD DATA LOCAL INPATH '" + dataFilePath + "' OVERWRITE INTO TABLE " + tableName + " PARTITION (part1='1',part2='1')"); Assert.assertEquals(0,ret.getResponseCode()); @@ -536,7 +539,7 @@ public void testLoad() throws Exception { //case1B: load data into overwrite non-partitioned-table setPermission(warehouseDir + "/" + tableName, 1); for (String child : listStatus(tableLoc)) { - setPermission(child, 1); + setPermission(child, 1, true); } fs.copyFromLocalFile(dataFilePath, new Path(location)); @@ -574,7 +577,7 @@ public void testLoad() throws Exception { setPermission(partLoc, 1); Assert.assertTrue(listStatus(partLoc).size() > 0); for (String child : listStatus(partLoc)) { - setPermission(child, 1); + setPermission(child, 1, true); } fs.copyFromLocalFile(dataFilePath, new Path(location)); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index 2b01fce..7ad43bc 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2720,7 +2720,7 @@ private static void copyFiles(final HiveConf conf, final FileSystem destFs, } if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, fullDestStatus, srcGroup, destFs, destPath, false); + HdfsUtils.setFullFileStatus(conf, fullDestStatus, srcGroup, destFs, destPath, false, false); } if (null != newFiles) { newFiles.add(destPath); @@ -2916,7 +2916,7 @@ public Void call() throws Exception { final String group = srcStatus.getGroup(); if(destFs.rename(srcStatus.getPath(), destf)) { if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, desiredStatus, group, destFs, destPath, false); + HdfsUtils.setFullFileStatus(conf, desiredStatus, group, destFs, destPath, false, false); } } else { throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest path:" diff --git 
shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java index 70a6857..74896e4 100644 --- shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java +++ shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.io; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import org.apache.commons.lang.ArrayUtils; @@ -63,25 +64,20 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus, String targetGroup, FileSystem fs, Path target, boolean recursion) throws IOException { + // Assume source is a directory for purposes of ACLs. + setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursion, true); + } + + // This method should be called when dealing with ACLs on files, because default ACLs cannot + // be applied to files; an exception will be thrown. 
+ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus, + String targetGroup, FileSystem fs, Path target, boolean recursion, boolean isDir) throws IOException { FileStatus fStatus= sourceStatus.getFileStatus(); String group = fStatus.getGroup(); boolean aclEnabled = Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true"); FsPermission sourcePerm = fStatus.getPermission(); List aclEntries = null; AclStatus aclStatus; - if (aclEnabled) { - aclStatus = sourceStatus.getAclStatus(); - if (aclStatus != null) { - LOG.trace(aclStatus.toString()); - aclEntries = aclStatus.getEntries(); - removeBaseAclEntries(aclEntries); - - //the ACL api's also expect the tradition user/group/other permission in the form of ACL - aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.USER, sourcePerm.getUserAction())); - aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.GROUP, sourcePerm.getGroupAction())); - aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.OTHER, sourcePerm.getOtherAction())); - } - } if (recursion) { //use FsShell to change group, permissions, and extended ACL's recursively @@ -93,19 +89,25 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta if (group != null && !group.isEmpty()) { run(fsShell, new String[]{"-chgrp", "-R", group, target.toString()}); } + if (aclEnabled) { + if (hasExtendedAcls(sourceStatus)) { + aclEntries = getDefaultAclEntries(sourceStatus, true); + } + if (null != aclEntries) { - //Attempt extended Acl operations only if its enabled, 8791but don't fail the operation regardless. try { - //construct the -setfacl command + //Attempt extended Acl operations only if its enabled, but don't fail the operation regardless. 
String aclEntry = Joiner.on(",").join(aclEntries); run(fsShell, new String[]{"-setfacl", "-R", "--set", aclEntry, target.toString()}); - } catch (Exception e) { LOG.info("Skipping ACL inheritance: File system for path " + target + " " + "does not support ACLs but dfs.namenode.acls.enabled is set to true. "); LOG.debug("The details are: " + e, e); } + } else { + String permission = Integer.toString(sourceStatus.getFileStatus().getPermission().toShort(), 8); + run(fsShell, new String[]{"-chmod", "-R", permission, target.toString()}); } } else { String permission = Integer.toString(sourcePerm.toShort(), 8); @@ -121,9 +123,18 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta fs.setOwner(target, null, group); } } + if (aclEnabled) { + if (hasExtendedAcls(sourceStatus)) { + // Include default ACLs for directories only. + // Attempting to set defaults on files will throw an exception. + aclEntries = getDefaultAclEntries(sourceStatus, isDir); + } + if (null != aclEntries) { fs.setAcl(target, aclEntries); + } else { + fs.setPermission(target, sourcePerm); } } else { fs.setPermission(target, sourcePerm); @@ -148,19 +159,97 @@ private static AclEntry newAclEntry(AclEntryScope scope, AclEntryType type, .setPermission(permission).build(); } /** - * Removes basic permission acls (unamed acls) from the list of acl entries - * @param entries acl entries to remove from. + * Generates a list of AclEntrys as determined from src. + * First iterate through all of the AclEntrys and extract the DEFAULT rules, from which + * we will create an ACCESS rule as well. If DEFAULT rules were not found for user, group, other, or + * mask, then inherit them from the basic permissions of src. Because the group permission + * is now the mask, we need to pull the group permission from list of extended ACLs. + * + * @param src the FileStatus from which to inherit permissions. + * @return a list of AclEntrys to inherit from src. 
*/ - private static void removeBaseAclEntries(List entries) { - Iterables.removeIf(entries, new Predicate() { - @Override - public boolean apply(AclEntry input) { - if (input.getName() == null) { - return true; + private static List getDefaultAclEntries(HadoopFileStatus src, boolean includeDefaults) { + ArrayList defaults = new ArrayList(); + + boolean[] hasDefaults = { false, false, false, false }; + + for (AclEntry e : src.getAclStatus().getEntries()) { + if (e.getScope() == AclEntryScope.DEFAULT) { + AclEntry acl = new AclEntry.Builder().setName(e.getName()).setScope(AclEntryScope.ACCESS) + .setType(e.getType()).setPermission(e.getPermission()).build(); + + defaults.add(acl); + if (includeDefaults) { + defaults.add(e); + } + + if (e.getName() == null) { + if (e.getType() == AclEntryType.USER) { + hasDefaults[0] = true; + } + if (e.getType() == AclEntryType.GROUP) { + hasDefaults[1] = true; + } + if (e.getType() == AclEntryType.OTHER) { + hasDefaults[2] = true; + } + if (e.getType() == AclEntryType.MASK) { + hasDefaults[3] = true; + } } - return false; } - }); + } + + if (! hasDefaults[0]) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.USER).setPermission(src.getFileStatus().getPermission().getUserAction()).build(); + defaults.add(acl); + + hasDefaults[0] = true; + } + + if (! hasDefaults[1]) { + for (AclEntry e : src.getAclStatus().getEntries()) { + if ((e.getScope() == AclEntryScope.ACCESS) && (e.getType() == AclEntryType.GROUP) && (e.getName() == null)) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.GROUP).setPermission(e.getPermission()).build(); + defaults.add(acl); + + hasDefaults[1] = true; + } + } + } + + if (! 
hasDefaults[2]) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.OTHER).setPermission(src.getFileStatus().getPermission().getOtherAction()).build(); + defaults.add(acl); + + hasDefaults[2] = true; + } + + if (! hasDefaults[3]) { + AclEntry acl = new AclEntry.Builder().setScope(AclEntryScope.ACCESS) + .setType(AclEntryType.MASK).setPermission(src.getFileStatus().getPermission().getGroupAction()).build(); + defaults.add(acl); + + hasDefaults[3] = true; + } + + return defaults; + } + + /** + * Returns true if extended ACL rules exist, otherwise false. + * @param status the FileStatus containing access permissions. + * @return true if extended ACL rules exist, otherwise false. + */ + private static boolean hasExtendedAcls(HadoopFileStatus status) { + if (status.getAclStatus() != null) { + return status.getAclStatus().getEntries().size() > 0; + } + + return false; } private static void run(FsShell shell, String[] command) throws Exception {