diff --git a/shims/common/pom.xml b/shims/common/pom.xml
index 700bd22..19821cd 100644
--- a/shims/common/pom.xml
+++ b/shims/common/pom.xml
@@ -87,4 +87,10 @@
+
+  <build>
+    <sourceDirectory>${basedir}/src/main/java</sourceDirectory>
+    <testSourceDirectory>${basedir}/src/main/test</testSourceDirectory>
+  </build>
+
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
index 7b6a9bd..e46eaab 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
@@ -20,9 +20,11 @@
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -64,31 +66,35 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta
}
public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
- String targetGroup, FileSystem fs, Path target, boolean recursion) throws IOException {
- FileStatus fStatus= sourceStatus.getFileStatus();
- String group = fStatus.getGroup();
- boolean aclEnabled = Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true");
- FsPermission sourcePerm = fStatus.getPermission();
-    List<AclEntry> aclEntries = null;
- if (aclEnabled) {
- if (sourceStatus.getAclEntries() != null) {
- LOG.trace(sourceStatus.aclStatus.toString());
- aclEntries = new ArrayList<>(sourceStatus.getAclEntries());
- removeBaseAclEntries(aclEntries);
-
- //the ACL api's also expect the tradition user/group/other permission in the form of ACL
- aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.USER, sourcePerm.getUserAction()));
- aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.GROUP, sourcePerm.getGroupAction()));
- aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.OTHER, sourcePerm.getOtherAction()));
- }
- }
+ String targetGroup, FileSystem fs, Path target, boolean recursion) throws IOException {
+ setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursion, recursion ? new FsShell() : null);
+ }
- if (recursion) {
- //use FsShell to change group, permissions, and extended ACL's recursively
- FsShell fsShell = new FsShell();
- fsShell.setConf(conf);
+ @VisibleForTesting
+ static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
+ String targetGroup, FileSystem fs, Path target, boolean recursion, FsShell fsShell) throws IOException {
+ try {
+ FileStatus fStatus = sourceStatus.getFileStatus();
+ String group = fStatus.getGroup();
+ boolean aclEnabled = Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true");
+ FsPermission sourcePerm = fStatus.getPermission();
+      List<AclEntry> aclEntries = null;
+ if (aclEnabled) {
+ if (sourceStatus.getAclEntries() != null) {
+ LOG.trace(sourceStatus.getAclStatus().toString());
+ aclEntries = new ArrayList<>(sourceStatus.getAclEntries());
+ removeBaseAclEntries(aclEntries);
+
+ //the ACL api's also expect the tradition user/group/other permission in the form of ACL
+ aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.USER, sourcePerm.getUserAction()));
+ aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.GROUP, sourcePerm.getGroupAction()));
+ aclEntries.add(newAclEntry(AclEntryScope.ACCESS, AclEntryType.OTHER, sourcePerm.getOtherAction()));
+ }
+ }
- try {
+ if (recursion) {
+ //use FsShell to change group, permissions, and extended ACL's recursively
+ fsShell.setConf(conf);
//If there is no group of a file, no need to call chgrp
if (group != null && !group.isEmpty()) {
run(fsShell, new String[]{"-chgrp", "-R", group, target.toString()});
@@ -103,7 +109,7 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta
} catch (Exception e) {
LOG.info("Skipping ACL inheritance: File system for path " + target + " " +
- "does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
+ "does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
LOG.debug("The details are: " + e, e);
}
}
@@ -111,23 +117,25 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta
String permission = Integer.toString(sourcePerm.toShort(), 8);
run(fsShell, new String[]{"-chmod", "-R", permission, target.toString()});
}
- } catch (Exception e) {
- throw new IOException("Unable to set permissions of " + target, e);
- }
- } else {
- if (group != null && !group.isEmpty()) {
- if (targetGroup == null ||
- !group.equals(targetGroup)) {
- fs.setOwner(target, null, group);
+ } else {
+ if (group != null && !group.isEmpty()) {
+ if (targetGroup == null || !group.equals(targetGroup)) {
+ fs.setOwner(target, null, group);
+ }
}
- }
- if (aclEnabled) {
- if (null != aclEntries) {
- fs.setAcl(target, aclEntries);
+ if (aclEnabled) {
+ if (null != aclEntries) {
+ fs.setAcl(target, aclEntries);
+ }
+ } else {
+ fs.setPermission(target, sourcePerm);
}
- } else {
- fs.setPermission(target, sourcePerm);
}
+ } catch (Exception e) {
+      LOG.info(
+          "Unable to inherit permissions for file " + target + " from file "
+              + sourceStatus.getFileStatus().getPath() + ": " + e.getMessage());
+ LOG.debug("Exception while inheriting permissions", e);
}
}
@@ -197,5 +205,10 @@ public FileStatus getFileStatus() {
    public List<AclEntry> getAclEntries() {
return aclStatus == null ? null : Collections.unmodifiableList(aclStatus.getEntries());
}
+
+ @VisibleForTesting
+ AclStatus getAclStatus() {
+ return this.aclStatus;
+ }
}
}
diff --git a/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java b/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java
new file mode 100644
index 0000000..a4af55d
--- /dev/null
+++ b/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.io;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclStatus;
+import org.apache.hadoop.fs.permission.FsPermission;
+
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class TestHdfsUtils {
+
+ @Test
+ public void testSetFullFileStatusFailInheritGroup() throws IOException {
+ Configuration conf = new Configuration();
+ HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
+ FileStatus mockSourceStatus = mock(FileStatus.class);
+ FileSystem fs = mock(FileSystem.class);
+
+ when(mockSourceStatus.getGroup()).thenReturn("fakeGroup1");
+ when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
+ doThrow(RuntimeException.class).when(fs).setOwner(any(Path.class), any(String.class), any(String.class));
+
+ HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, "fakeGroup2", fs, new Path("fakePath"), false);
+ verify(fs).setOwner(any(Path.class), any(String.class), any(String.class));
+ }
+
+ @Test
+ public void testSetFullFileStatusFailInheritAcls() throws IOException {
+ Configuration conf = new Configuration();
+ conf.set("dfs.namenode.acls.enabled", "true");
+
+ HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
+ FileStatus mockSourceStatus = mock(FileStatus.class);
+ AclStatus mockAclStatus = mock(AclStatus.class);
+ FileSystem fs = mock(FileSystem.class);
+
+    when(mockSourceStatus.getPermission()).thenReturn(new FsPermission((short) 0777));
+ when(mockAclStatus.toString()).thenReturn("");
+ when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
+    when(mockHadoopFileStatus.getAclEntries()).thenReturn(new ArrayList<AclEntry>());
+ when(mockHadoopFileStatus.getAclStatus()).thenReturn(mockAclStatus);
+ doThrow(RuntimeException.class).when(fs).setAcl(any(Path.class), any(List.class));
+
+ HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, "", fs, new Path("fakePath"), false);
+ verify(fs).setAcl(any(Path.class), any(List.class));
+ }
+
+ @Test
+ public void testSetFullFileStatusFailInheritPerms() throws IOException {
+ Configuration conf = new Configuration();
+
+ HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
+ FileStatus mockSourceStatus = mock(FileStatus.class);
+ FileSystem fs = mock(FileSystem.class);
+
+    when(mockSourceStatus.getPermission()).thenReturn(new FsPermission((short) 0777));
+ when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
+ doThrow(RuntimeException.class).when(fs).setPermission(any(Path.class), any(FsPermission.class));
+
+ HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, "", fs, new Path("fakePath"), false);
+ verify(fs).setPermission(any(Path.class), any(FsPermission.class));
+ }
+
+ @Test
+ public void testSetFullFileStatusFailInheritGroupRecursive() {
+ // TODO
+ }
+
+ @Test
+ public void testSetFullFileStatusFailInheritAclsRecursive() {
+ // TODO
+ }
+
+ @Test
+ public void testSetFullFileStatusFailInheritPermsRecursive() {
+ // TODO
+ }
+}