diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
index 38f9803ced..dd7400532c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -129,8 +129,12 @@ private void moveFileInDfs (Path sourcePath, Path targetPath, FileSystem fs)
         throw new HiveException("Unable to rename: " + sourcePath
             + " to: " + targetPath);
       }
-    } else if (!fs.mkdirs(targetPath)) {
-      throw new HiveException("Unable to make directory: " + targetPath);
+    } else {
+      boolean inheritPerms = HiveConf.getBoolVar(conf,
+          HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+      if (!FileUtils.mkdir(fs, targetPath, inheritPerms, conf)) {
+        throw new HiveException("Unable to make directory: " + targetPath);
+      }
     }
   }
 
@@ -253,7 +257,9 @@ public int execute(DriverContext driverContext) {
       Path destPath = lmfd.getTargetDirs().get(i);
       FileSystem fs = destPath.getFileSystem(conf);
       if (!fs.exists(destPath.getParent())) {
-        fs.mkdirs(destPath.getParent());
+        boolean inheritPerms = HiveConf.getBoolVar(conf,
+            HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
+        FileUtils.mkdir(fs, destPath.getParent(), inheritPerms, conf);
       }
       moveFile(srcPath, destPath, isDfsDir);
       i++;
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 48b781de43..2f586203bc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -2758,7 +2758,7 @@ static protected void copyFiles(HiveConf conf, Path srcf, Path destf,
     // If we're moving files around for an ACID write then the rules and paths are all different.
     // You can blame this on Owen.
     if (isAcid) {
-      moveAcidFiles(srcFs, srcs, destf, newFiles);
+      moveAcidFiles(srcFs, srcs, destf, newFiles, conf);
     } else {
       // check that source and target paths exist
       List<List<Path[]>> result = checkPaths(conf, fs, srcs, srcFs, destf, false);
@@ -2780,7 +2780,9 @@ static protected void copyFiles(HiveConf conf, Path srcf, Path destf,
   }
 
   private static void moveAcidFiles(FileSystem fs, FileStatus[] stats, Path dst,
-                                    List<Path> newFiles) throws HiveException {
+                                    List<Path> newFiles, HiveConf conf) throws HiveException {
+    boolean inheritPerms =
+        conf.getBoolVar(ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
     // The layout for ACID files is table|partname/base|delta/bucket
     // We will always only be writing delta files. In the buckets created by FileSinkOperator
     // it will look like bucket/delta/bucket. So we need to move that into the above structure.
@@ -2828,7 +2830,7 @@ private static void moveAcidFiles(FileSystem fs, FileStatus[] stats, Path dst,
         try {
           if (!createdDeltaDirs.contains(deltaDest)) {
             try {
-              fs.mkdirs(deltaDest);
+              FileUtils.mkdir(fs, deltaDest, inheritPerms, fs.getConf());
               createdDeltaDirs.add(deltaDest);
             } catch (IOException swallowIt) {
               // Don't worry about this, as it likely just means it's already been created.
@@ -2843,7 +2845,7 @@ private static void moveAcidFiles(FileSystem fs, FileStatus[] stats, Path dst,
             Path bucketDest = new Path(deltaDest, bucketSrc.getName());
             LOG.info("Moving bucket " + bucketSrc.toUri().toString() + " to " +
                 bucketDest.toUri().toString());
-            fs.rename(bucketSrc, bucketDest);
+            FileUtils.renameWithPerms(fs, bucketSrc, bucketDest, inheritPerms, conf);
             if (newFiles != null) newFiles.add(bucketDest);
           }
         } catch (IOException e) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
index d99bbd4d90..11b5886e06 100644
--- ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
+++ ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.StringableMap;
 import org.apache.hadoop.hive.common.ValidCompactorTxnList;
@@ -770,6 +771,8 @@ public void abortTask(TaskAttemptContext taskAttemptContext) throws IOException
     @Override
     public void commitJob(JobContext context) throws IOException {
       JobConf conf = ShimLoader.getHadoopShims().getJobConf(context);
+      boolean inheritPerms = conf.getBoolean(
+          HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, false);
       Path tmpLocation = new Path(conf.get(TMP_LOCATION));
       Path finalLocation = new Path(conf.get(FINAL_LOCATION));
       FileSystem fs = tmpLocation.getFileSystem(conf);
@@ -779,7 +782,8 @@ public void commitJob(JobContext context) throws IOException {
       FileStatus[] contents = fs.listStatus(tmpLocation);
       for (int i = 0; i < contents.length; i++) {
         Path newPath = new Path(finalLocation, contents[i].getPath().getName());
-        fs.rename(contents[i].getPath(), newPath);
+        FileUtils.renameWithPerms(fs, contents[i].getPath(),
+            newPath, inheritPerms, conf);
       }
       fs.delete(tmpLocation, true);
     }
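
Reviewer note: every call site in this patch swaps a bare `fs.mkdirs()`/`fs.rename()` for a `FileUtils` helper that can stamp the new path with its parent directory's permissions when `hive.warehouse.subdir.inherit.perms` is on. The sketch below is a simplified model of what those helpers do, assuming inheritance copies the parent's permission bits and group; the shipped implementations in `org.apache.hadoop.hive.common.FileUtils` are more involved, so don't read these bodies as the actual Hive code.

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Simplified stand-in for the org.apache.hadoop.hive.common.FileUtils helpers. */
public final class FileUtilsSketch {

  /** mkdirs, then optionally copy the parent's permission bits and group onto the new dir. */
  public static boolean mkdir(FileSystem fs, Path dir, boolean inheritPerms,
      Configuration conf) throws IOException {
    boolean created = fs.mkdirs(dir);
    if (created && inheritPerms) {
      inheritFromParent(fs, dir);
    }
    return created;
  }

  /** rename, then optionally stamp the destination with its new parent's permissions. */
  public static boolean renameWithPerms(FileSystem fs, Path src, Path dst,
      boolean inheritPerms, Configuration conf) throws IOException {
    boolean renamed = fs.rename(src, dst);
    if (renamed && inheritPerms) {
      inheritFromParent(fs, dst);
    }
    return renamed;
  }

  private static void inheritFromParent(FileSystem fs, Path path) throws IOException {
    FileStatus parent = fs.getFileStatus(path.getParent());
    fs.setPermission(path, parent.getPermission());
    // Group changes only; the owner stays whoever created the path.
    fs.setOwner(path, null, parent.getGroup());
  }

  private FileUtilsSketch() {}
}
```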
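For completeness, here is a hedged caller-side example mirroring the pattern the patch applies in MoveTask.moveFileInDfs, reading the flag the same way CompactorMR.commitJob does (by property name, defaulting to false). The target path is hypothetical, and `FileUtilsSketch` is the stand-in class from the sketch above, not a real Hive API.

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class InheritPermsExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // Same lookup commitJob uses in the diff: property name, default false.
    boolean inheritPerms = conf.getBoolean(
        "hive.warehouse.subdir.inherit.perms", false);

    Path target = new Path("/warehouse/demo.db/t/ds=2015-01-01"); // hypothetical path
    if (!FileUtilsSketch.mkdir(fs, target, inheritPerms, conf)) {
      throw new IOException("Unable to make directory: " + target);
    }
  }
}
```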