diff --git a/common/pom.xml b/common/pom.xml
index e1c15ee..84bb1e5 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -157,7 +157,13 @@
           <artifactId>commons-logging</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
-
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+    </dependency>
     <dependency>
       <groupId>com.google.code.tempus-fugit</groupId>
diff --git a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
index e6a17cb..b7f1359 100644
--- a/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/BlobStorageUtils.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.hadoop.hive.common;
 
 import org.apache.hadoop.conf.Configuration;
@@ -24,21 +25,23 @@
 import java.util.Collection;
 
+
 /**
  * Utilities for different blob (object) storage systems
  */
 public class BlobStorageUtils {
+
   private static final boolean DISABLE_BLOBSTORAGE_AS_SCRATCHDIR = false;
 
   public static boolean isBlobStoragePath(final Configuration conf, final Path path) {
     return path != null && isBlobStorageScheme(conf, path.toUri().getScheme());
   }
 
-  public static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
-    return fs != null && isBlobStorageScheme(conf, fs.getScheme());
+  static boolean isBlobStorageFileSystem(final Configuration conf, final FileSystem fs) {
+    return fs != null && fs.getUri() != null && isBlobStorageScheme(conf, fs.getUri().getScheme());
   }
 
-  public static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
+  static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
     Collection<String> supportedBlobStoreSchemes =
         conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname);
@@ -61,4 +64,5 @@
         HiveConf.ConfVars.HIVE_BLOBSTORE_OPTIMIZATIONS_ENABLED.defaultBoolVal
     );
   }
+
 }
diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index e586015..8ed8cc4 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -46,13 +46,11 @@
 import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.io.HdfsUtils;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.common.util.ShutdownHookManager;
 import org.slf4j.Logger;
@@ -509,11 +507,29 @@ public static boolean isOwnerOfFileHierarchy(FileSystem fs, FileStatus fileStatu
 
   /**
    * Creates the directory and all necessary parent directories.
+   *
+   * @param fs FileSystem to use
+   * @param f path to create.
+   * @param conf Hive configuration
+   *
+   * @return true if directory created successfully. False otherwise, including if it exists.
+   *
+   * @throws IOException exception in creating the directory
+   */
+  public static boolean mkdir(FileSystem fs, Path f, Configuration conf) throws IOException {
+    return mkdir(fs, f, shouldInheritPerms(conf, fs), conf);
+  }
+
+  /**
+   * Creates the directory and all necessary parent directories.
+   *
    * @param fs FileSystem to use
    * @param f path to create.
    * @param inheritPerms whether directory inherits the permission of the last-existing parent path
    * @param conf Hive configuration
+   *
    * @return true if directory created successfully. False otherwise, including if it exists.
+   *
    * @throws IOException exception in creating the directory
    */
   public static boolean mkdir(FileSystem fs, Path f, boolean inheritPerms, Configuration conf) throws IOException {
@@ -601,9 +617,10 @@ static boolean copy(FileSystem srcFS, Path src,
       copied = FileUtil.copy(srcFS, src, dstFS, dst, deleteSource, overwrite, conf);
     }
 
-    boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+    boolean inheritPerms = shouldInheritPerms(conf, dstFS);
     if (copied && inheritPerms) {
-      HdfsUtils.setFullFileStatus(conf, new HdfsUtils.HadoopFileStatus(conf, dstFS, dst.getParent()), dstFS, dst, true);
+      inheritPerms(conf, new HdfsUtils.HadoopFileStatus(conf, dstFS, dst.getParent()), dstFS, dst,
+          true);
     }
     return copied;
   }
@@ -994,4 +1011,18 @@ public static URI getURI(String path) throws URISyntaxException {
     return result;
   }
 
+  public static boolean shouldInheritPerms(Configuration conf, FileSystem fs) {
+    return HiveConf.getBoolVar(conf,
+        HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS) && StorageUtils.shouldSetPerms(conf, fs);
+  }
+
+  public static void inheritPerms(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus, FileSystem fs, Path target,
+      boolean recursive) {
+    inheritPerms(conf, sourceStatus, null, fs, target, recursive);
+  }
+
+  public static void inheritPerms(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus, String targetGroup,
+      FileSystem fs, Path target, boolean recursive) {
+    HdfsUtils.setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursive);
+  }
 }
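A minimal usage sketch of the new three-argument FileUtils.mkdir overload introduced above. The directory path is hypothetical, and the literal conf key assumes HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS resolves to "hive.warehouse.subdir.inherit.perms":

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;

public class MkdirSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean("hive.warehouse.subdir.inherit.perms", true); // assumed conf key

    FileSystem fs = FileSystem.getLocal(conf);
    Path dir = new Path("/tmp/hive-mkdir-sketch/a/b"); // hypothetical path

    // The overload derives inheritPerms from the conf *and* the target
    // filesystem, so callers no longer read the flag themselves: on a
    // permission-aware filesystem the parent's permissions are inherited,
    // on a supported blob store the permission calls are skipped.
    boolean created = FileUtils.mkdir(fs, dir, conf);
    System.out.println("created = " + created);
  }
}

The point of the overload is that the inherit-perms decision now lives in one place instead of being re-derived, filesystem-unaware, at every call site.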
diff --git a/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java b/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java
new file mode 100644
index 0000000..900e2b1
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/StorageUtils.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+
+
+public class StorageUtils {
+
+  /**
+   * Returns true if permissions should be set on the given filesystem, false otherwise. Certain implementations of
+   * {@link FileSystem} don't have a concept of permissions, such as S3. This method checks to determine if the given
+   * {@link FileSystem} falls into that category.
+   *
+   * @param conf the {@link Configuration} to use when checking if permissions should be set on the {@link FileSystem}
+   * @param fs the {@link FileSystem} to check to see if permissions should be set or not
+   *
+   * @return true if permissions should be set on the given {@link FileSystem}, false otherwise
+   */
+  public static boolean shouldSetPerms(Configuration conf, FileSystem fs) {
+    return !(BlobStorageUtils.areOptimizationsEnabled(conf) && BlobStorageUtils.isBlobStorageFileSystem(conf, fs));
+  }
+}
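A sketch of the shouldSetPerms decision table, written in the same mocked-FileSystem style the tests in this patch use (Mockito assumed on the classpath; the literal conf keys assume the HiveConf varnames "hive.blobstore.supported.schemes" and "hive.blobstore.optimizations.enabled"):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.common.StorageUtils;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ShouldSetPermsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("hive.blobstore.supported.schemes", "s3a,swift");     // assumed conf key
    conf.setBoolean("hive.blobstore.optimizations.enabled", true); // assumed conf key

    FileSystem s3a = mock(FileSystem.class);
    when(s3a.getUri()).thenReturn(URI.create("s3a:///"));
    FileSystem hdfs = mock(FileSystem.class);
    when(hdfs.getUri()).thenReturn(URI.create("hdfs:///"));

    // False only when optimizations are on AND the scheme is a listed blob store.
    System.out.println(StorageUtils.shouldSetPerms(conf, s3a));  // false
    System.out.println(StorageUtils.shouldSetPerms(conf, hdfs)); // true

    // With optimizations disabled, permissions are set even on blob stores.
    conf.setBoolean("hive.blobstore.optimizations.enabled", false);
    System.out.println(StorageUtils.shouldSetPerms(conf, s3a));  // true
  }
}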
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java b/common/src/java/org/apache/hadoop/hive/io/HdfsUtils.java
similarity index 84%
rename from shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
rename to common/src/java/org/apache/hadoop/hive/io/HdfsUtils.java
index 277738f..1b57184 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/io/HdfsUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/io/HdfsUtils.java
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hive.common.StorageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -49,6 +50,7 @@
 import com.google.common.collect.Iterables;
 
 public class HdfsUtils {
+
   private static final Logger LOG = LoggerFactory.getLogger("shims.HdfsUtils");
 
   // TODO: this relies on HDFS not changing the format; we assume if we could get inode ID, this
@@ -74,7 +76,9 @@ public static Path getFileIdPath(
    */
   public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
       FileSystem fs, Path target, boolean recursion) {
-    setFullFileStatus(conf, sourceStatus, null, fs, target, recursion);
+    if (StorageUtils.shouldSetPerms(conf, fs)) {
+      setFullFileStatus(conf, sourceStatus, null, fs, target, recursion);
+    }
   }
 
   /**
@@ -91,7 +95,9 @@ public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileSta
    */
   public static void setFullFileStatus(Configuration conf, HdfsUtils.HadoopFileStatus sourceStatus,
       String targetGroup, FileSystem fs, Path target, boolean recursion) {
-    setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursion, recursion ? new FsShell() : null);
+    if (StorageUtils.shouldSetPerms(conf, fs)) {
+      setFullFileStatus(conf, sourceStatus, targetGroup, fs, target, recursion, recursion ? new FsShell() : null);
+    }
   }
 
   @VisibleForTesting
@@ -180,6 +186,7 @@ private static AclEntry newAclEntry(AclEntryScope scope, AclEntryType type,
     return new AclEntry.Builder().setScope(scope).setType(type)
         .setPermission(permission).build();
   }
+
   /**
    * Removes basic permission acls (unamed acls) from the list of acl entries
    * @param entries acl entries to remove from.
@@ -201,39 +208,41 @@ private static void run(FsShell shell, String[] command) throws Exception {
     int retval = shell.run(command);
     LOG.debug("Return value is :" + retval);
   }
-public static class HadoopFileStatus {
-
-  private final FileStatus fileStatus;
-  private final AclStatus aclStatus;
-
-  public HadoopFileStatus(Configuration conf, FileSystem fs, Path file) throws IOException {
-
-    FileStatus fileStatus = fs.getFileStatus(file);
-    AclStatus aclStatus = null;
-    if (Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true")) {
-      //Attempt extended Acl operations only if its enabled, but don't fail the operation regardless.
-      try {
-        aclStatus = fs.getAclStatus(file);
-      } catch (Exception e) {
-        LOG.info("Skipping ACL inheritance: File system for path " + file + " " +
-            "does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
-        LOG.debug("The details are: " + e, e);
+
+  public static class HadoopFileStatus {
+
+    private final FileStatus fileStatus;
+    private final AclStatus aclStatus;
+
+    public HadoopFileStatus(Configuration conf, FileSystem fs, Path file) throws IOException {
+
+      FileStatus fileStatus = fs.getFileStatus(file);
+      AclStatus aclStatus = null;
+      if (Objects.equal(conf.get("dfs.namenode.acls.enabled"), "true")) {
+        // Attempt extended ACL operations only if it's enabled, but don't fail the operation regardless.
+        try {
+          aclStatus = fs.getAclStatus(file);
+        } catch (Exception e) {
+          LOG.info("Skipping ACL inheritance: File system for path " + file + " " +
+              "does not support ACLs but dfs.namenode.acls.enabled is set to true. ");
+          LOG.debug("The details are: " + e, e);
+        }
       }
-    }this.fileStatus = fileStatus;
-    this.aclStatus = aclStatus;
-  }
+      this.fileStatus = fileStatus;
+      this.aclStatus = aclStatus;
+    }
 
-  public FileStatus getFileStatus() {
-    return fileStatus;
-  }
+    public FileStatus getFileStatus() {
+      return fileStatus;
+    }
 
-  public List getAclEntries() {
-    return aclStatus == null ? null : Collections.unmodifiableList(aclStatus.getEntries());
-  }
+    public List<AclEntry> getAclEntries() {
+      return aclStatus == null ?
+          null : Collections.unmodifiableList(aclStatus.getEntries());
+    }
 
-  @VisibleForTesting
-  AclStatus getAclStatus() {
-    return this.aclStatus;
+    @VisibleForTesting
+    AclStatus getAclStatus() {
+      return this.aclStatus;
+    }
   }
 }
-}
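The behavioral consequence of the new guard, in miniature: on a filesystem whose scheme is configured as a blob store, both public setFullFileStatus overloads now return without touching the filesystem at all, which is why the test added later in this patch can safely pass null for the source status and target path. A sketch under those assumptions (Mockito mock, assumed conf key):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.io.HdfsUtils;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class SetFullFileStatusSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("hive.blobstore.supported.schemes", "s3a"); // assumed conf key

    FileSystem s3a = mock(FileSystem.class);
    when(s3a.getUri()).thenReturn(URI.create("s3a:///"));

    // No getFileStatus/setPermission/setAcl/setOwner calls ever reach the
    // filesystem: the shouldSetPerms() guard short-circuits before any of them.
    HdfsUtils.setFullFileStatus(conf, null, s3a, null, false);
  }
}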
diff --git a/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java b/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
index 84a0d86..918ec95 100644
--- a/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
+++ b/common/src/test/org/apache/hadoop/hive/common/TestBlobStorageUtils.java
@@ -64,18 +64,18 @@
 
     /* Valid FileSystem schemes */
 
-    doReturn("s3a").when(fs).getScheme();
+    doReturn(URI.create("s3a:///")).when(fs).getUri();
     assertTrue(isBlobStorageFileSystem(conf, fs));
 
-    doReturn("swift").when(fs).getScheme();
+    doReturn(URI.create("swift:///")).when(fs).getUri();
     assertTrue(isBlobStorageFileSystem(conf, fs));
 
     /* Invalid FileSystem schemes */
 
-    doReturn("hdfs").when(fs).getScheme();
+    doReturn(URI.create("hdfs:///")).when(fs).getUri();
     assertFalse(isBlobStorageFileSystem(conf, fs));
 
-    doReturn("").when(fs).getScheme();
+    doReturn(URI.create("")).when(fs).getUri();
     assertFalse(isBlobStorageFileSystem(conf, fs));
 
     assertFalse(isBlobStorageFileSystem(conf, null));
diff --git a/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java b/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java
new file mode 100644
index 0000000..638f48c
--- /dev/null
+++ b/common/src/test/org/apache/hadoop/hive/common/TestStorageUtils.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import java.net.URI;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+public class TestStorageUtils {
+
+  private final HiveConf conf = new HiveConf();
+
+  @Before
+  public void setUp() {
+    conf.set(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname,
+        HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.getDefaultValue());
+  }
+
+  @Test
+  public void testShouldInheritPerms() {
+    conf.set(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, "true");
+    FileSystem fs = mock(FileSystem.class);
+    for (String blobScheme : conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname)) {
+      when(fs.getUri()).thenReturn(URI.create(blobScheme + ":///"));
+      assertFalse(FileUtils.shouldInheritPerms(conf, fs));
+    }
+
+    when(fs.getUri()).thenReturn(URI.create("hdfs:///"));
+    assertTrue(FileUtils.shouldInheritPerms(conf, fs));
+  }
+}
+ */ + +package org.apache.hadoop.hive.common; + +import java.net.URI; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.hive.conf.HiveConf; + +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.assertFalse; + +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + + +public class TestStorageUtils { + + private final HiveConf conf = new HiveConf(); + + @Before + public void setUp() { + conf.set(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname, HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.getDefaultValue()); + } + + @Test + public void testShouldInheritPerms() { + conf.set(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, "true"); + FileSystem fs = mock(FileSystem.class); + for (String blobScheme : conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname)) { + when(fs.getUri()).thenReturn(URI.create(blobScheme + ":///")); + assertFalse(FileUtils.shouldInheritPerms(conf, fs)); + } + + when(fs.getUri()).thenReturn(URI.create("hdfs:///")); + assertTrue(FileUtils.shouldInheritPerms(conf, fs)); + } +} diff --git a/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java b/common/src/test/org/apache/hadoop/hive/io/TestHdfsUtils.java similarity index 90% rename from shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java rename to common/src/test/org/apache/hadoop/hive/io/TestHdfsUtils.java index 86a132c..79507e4 100644 --- a/shims/common/src/main/test/org/apache/hadoop/hive/io/TestHdfsUtils.java +++ b/common/src/test/org/apache/hadoop/hive/io/TestHdfsUtils.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hive.io; import java.io.IOException; +import java.net.URI; import java.util.ArrayList; import java.util.List; @@ -31,11 +32,14 @@ import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hive.conf.HiveConf; import org.junit.Test; import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyList; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -181,4 +185,19 @@ public void testSetFullFileStatusFailInheritPermsRecursive() throws Exception { true, mockFsShell); verify(mockFsShell).run(new String[]{"-chmod", "-R", any(String.class), fakeTarget.toString()}); } + + @Test + public void testSkipSetFullFileStatusIfBlobStore() throws IOException { + Configuration conf = new Configuration(); + conf.set(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname, "s3a"); + FileSystem fs = mock(FileSystem.class); + when(fs.getUri()).thenReturn(URI.create("s3a:///")); + HdfsUtils.setFullFileStatus(conf, null, null, fs, null, false); + + verify(fs, never()).getFileStatus(any(Path.class)); + verify(fs, never()).listStatus(any(Path[].class)); + verify(fs, never()).setPermission(any(Path.class), any(FsPermission.class)); + verify(fs, never()).setAcl(any(Path.class), anyList()); + verify(fs, never()).setOwner(any(Path.class), any(String.class), any(String.class)); + } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java index 27283d8..c94ef07 100755 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java +++ 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index d1d2789..08bba3d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -355,9 +355,7 @@ private Path getStagingDir(Path inputPath, boolean mkdir) {
 
     if (mkdir) {
       try {
-        boolean inheritPerms = HiveConf.getBoolVar(conf,
-            HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
-        if (!FileUtils.mkdir(fs, dir, inheritPerms, conf)) {
+        if (!FileUtils.mkdir(fs, dir, conf)) {
          throw new IllegalStateException("Cannot create staging directory '" +
              dir.toString() + "'");
        }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
index cbe0aca..2683f29 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java
@@ -70,8 +70,7 @@ public int execute(DriverContext driverContext) {
       }
     }
 
-    boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
-    if (!FileUtils.mkdir(dstFs, toPath, inheritPerms, conf)) {
+    if (!FileUtils.mkdir(dstFs, toPath, conf)) {
       console.printError("Cannot make target directory: " + toPath.toString());
       return 2;
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 1802f37..5cf2c2b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -181,8 +181,8 @@ private Path createTargetPath(Path targetPath, FileSystem fs) throws IOException
         actualPath = actualPath.getParent();
       }
       fs.mkdirs(mkDirPath);
-      if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS)) {
-        HdfsUtils.setFullFileStatus(conf, new HdfsUtils.HadoopFileStatus(conf, fs, actualPath), fs, mkDirPath, true);
+      if (FileUtils.shouldInheritPerms(conf, fs)) {
+        FileUtils.inheritPerms(conf, new HdfsUtils.HadoopFileStatus(conf, fs, actualPath), fs, mkDirPath, true);
       }
     }
     return deletePath;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
index 4686e2c..d2f9e79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ReplCopyTask.java
@@ -106,8 +106,7 @@ protected int execute(DriverContext driverContext) {
       srcFiles.addAll(Arrays.asList(srcs));
       LOG.debug("ReplCopyTask numFiles:" + (srcFiles == null ? "null" : srcFiles.size()));
 
-      boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
-      if (!FileUtils.mkdir(dstFs, toPath, inheritPerms, conf)) {
+      if (!FileUtils.mkdir(dstFs, toPath, conf)) {
        console.printError("Cannot make target directory: " + toPath.toString());
        return 2;
       }
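The four hunks above are the same mechanical rewrite: call sites stop reading HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS directly and defer to the filesystem-aware helpers. Condensed into a helper that mirrors the MoveTask.createTargetPath() change (method and variable names here are hypothetical):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.io.HdfsUtils;

public class InheritPermsSketch {
  // Create a directory, then copy the parent's permissions/ACLs down only
  // when the target filesystem actually honors them.
  static void mkdirInheriting(Configuration conf, FileSystem fs, Path dir) throws IOException {
    fs.mkdirs(dir);
    if (FileUtils.shouldInheritPerms(conf, fs)) {
      FileUtils.inheritPerms(conf,
          new HdfsUtils.HadoopFileStatus(conf, fs, dir.getParent()), fs, dir, true);
    }
  }
}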
"null" : srcFiles.size())); - boolean inheritPerms = conf.getBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS); - if (!FileUtils.mkdir(dstFs, toPath, inheritPerms, conf)) { + if (!FileUtils.mkdir(dstFs, toPath, conf)) { console.printError("Cannot make target directory: " + toPath.toString()); return 2; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java index f64cfda..f11feb7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java @@ -2889,8 +2889,7 @@ private static void copyFiles(final HiveConf conf, final FileSystem destFs, if (!fullDestStatus.getFileStatus().isDirectory()) { throw new HiveException(destf + " is not a directory."); } - final boolean inheritPerms = HiveConf.getBoolVar(conf, - HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS); + final boolean inheritPerms = FileUtils.shouldInheritPerms(conf, destFs); final List>> futures = new LinkedList<>(); final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ? Executors.newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25), @@ -2939,8 +2938,9 @@ private static void copyFiles(final HiveConf conf, final FileSystem destFs, Path destPath = mvFile(conf, srcFs, srcP, destFs, destf, isSrcLocal, isRenameAllowed); if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, fullDestStatus, srcGroup, destFs, destPath, false); + FileUtils.inheritPerms(conf, fullDestStatus, srcGroup, destFs, destPath, false); } + if (null != newFiles) { newFiles.add(destPath); } @@ -2952,7 +2952,7 @@ private static void copyFiles(final HiveConf conf, final FileSystem destFs, } if (null == pool) { if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, fullDestStatus, null, destFs, destf, true); + FileUtils.inheritPerms(conf, fullDestStatus, destFs, destf, true); } } else { pool.shutdown(); @@ -3105,8 +3105,7 @@ public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf, } //needed for perm inheritance. 
- final boolean inheritPerms = HiveConf.getBoolVar(conf, - HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS); + final boolean inheritPerms = FileUtils.shouldInheritPerms(conf, destFs); HdfsUtils.HadoopFileStatus destStatus = null; // If source path is a subdirectory of the destination path: @@ -3118,7 +3117,7 @@ public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf, boolean destIsSubDir = isSubDir(srcf, destf, srcFs, destFs, isSrcLocal); try { if (inheritPerms || replace) { - try{ + try { destStatus = new HdfsUtils.HadoopFileStatus(conf, destFs, destf); //if destf is an existing directory: //if replace is true, delete followed by rename(mv) is equivalent to replace @@ -3142,7 +3141,7 @@ public static boolean moveFile(final HiveConf conf, Path srcf, final Path destf, // For local src file, copy to hdfs destFs.copyFromLocalFile(srcf, destf); if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, destStatus, destFs, destf, true); + FileUtils.inheritPerms(conf, destStatus, destFs, destf, true); } return true; } else { @@ -3178,7 +3177,7 @@ public Void call() throws Exception { final String group = srcStatus.getGroup(); if(destFs.rename(srcStatus.getPath(), destFile)) { if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, desiredStatus, group, destFs, destFile, false); + FileUtils.inheritPerms(conf, desiredStatus, group, destFs, destFile, false); } } else { throw new IOException("rename for src path: " + srcStatus.getPath() + " to dest path:" @@ -3191,7 +3190,7 @@ public Void call() throws Exception { } if (null == pool) { if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, desiredStatus, null, destFs, destf, true); + FileUtils.inheritPerms(conf, desiredStatus, destFs, destf, true); } } else { pool.shutdown(); @@ -3209,7 +3208,7 @@ public Void call() throws Exception { } else { if (destFs.rename(srcf, destf)) { if (inheritPerms) { - HdfsUtils.setFullFileStatus(conf, destStatus, destFs, destf, true); + FileUtils.inheritPerms(conf, destStatus, destFs, destf, true); } return true; } @@ -3261,12 +3260,10 @@ static protected boolean needToCopy(Path srcf, Path destf, FileSystem srcFs, Fil */ static protected void copyFiles(HiveConf conf, Path srcf, Path destf, FileSystem fs, boolean isSrcLocal, boolean isAcid, List newFiles) throws HiveException { - boolean inheritPerms = HiveConf.getBoolVar(conf, - HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS); try { // create the destination if it does not exist if (!fs.exists(destf)) { - FileUtils.mkdir(fs, destf, inheritPerms, conf); + FileUtils.mkdir(fs, destf, conf); } } catch (IOException e) { throw new HiveException( @@ -3460,9 +3457,7 @@ protected void replaceFiles(Path tablePath, Path srcf, Path destf, Path oldPath, // first call FileUtils.mkdir to make sure that destf directory exists, if not, it creates // destf with inherited permissions - boolean inheritPerms = HiveConf.getBoolVar(conf, HiveConf.ConfVars - .HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS); - boolean destfExist = FileUtils.mkdir(destFs, destf, inheritPerms, conf); + boolean destfExist = FileUtils.mkdir(destFs, destf, conf); if(!destfExist) { throw new IOException("Directory " + destf.toString() + " does not exist and could not be created.");
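Taken together, two user-visible settings now decide whether any permission work happens on these write paths. A condensed restatement of FileUtils.shouldInheritPerms keyed on a bare scheme string (the literal keys and default values below are assumptions, not read from HiveConf):

import org.apache.hadoop.conf.Configuration;

public class InheritDecisionSketch {
  // Equivalent decision to FileUtils.shouldInheritPerms(conf, fs), using the
  // filesystem scheme instead of a FileSystem instance.
  static boolean shouldInheritPerms(Configuration conf, String scheme) {
    boolean optedIn = conf.getBoolean("hive.warehouse.subdir.inherit.perms", false); // assumed default
    boolean blobOptimized = conf.getBoolean("hive.blobstore.optimizations.enabled", true) // assumed default
        && conf.getStringCollection("hive.blobstore.supported.schemes").contains(scheme);
    return optedIn && !blobOptimized;
  }
}

On HDFS nothing changes; on a supported blob store (s3a, swift, and whatever else hive.blobstore.supported.schemes lists) the chmod/chgrp/setAcl traffic is skipped entirely.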