diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 185ea95..3a045b7 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -206,7 +206,7 @@ "Query plan format serialization between client and task nodes. \n" + "Two supported values are : kryo and javaXML. Kryo is default."), SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive", - "HDFS root scratch dir for Hive jobs which gets created with 777 permission. " + + "HDFS root scratch dir for Hive jobs which gets created with write all (733) permission. " + "For each connecting user, an HDFS scratch dir: ${hive.exec.scratchdir}/ is created, " + "with ${hive.scratch.dir.permission}."), LOCALSCRATCHDIR("hive.exec.local.scratchdir", diff --git a/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java b/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java index b430cae..ffad413 100644 --- a/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java +++ b/itests/hive-unit/src/main/java/org/apache/hive/jdbc/miniHS2/MiniHS2.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.shims.HadoopShims.MiniDFSShim; import org.apache.hadoop.hive.shims.HadoopShims.MiniMrShim; import org.apache.hadoop.hive.shims.ShimLoader; @@ -50,6 +51,7 @@ public static final String HS2_HTTP_MODE = "http"; private static final String driverName = "org.apache.hive.jdbc.HiveDriver"; private static final FsPermission FULL_PERM = new FsPermission((short)00777); + private static final FsPermission WRITE_ALL_PERM = new FsPermission((short)00733); private HiveServer2 hiveServer2 = null; private final File baseDir; private final Path baseDfsDir; @@ 
-200,9 +202,8 @@ private MiniHS2(HiveConf hiveConf, boolean useMiniMR, boolean useMiniKdc, hiveConf.setIntVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT, getHttpPort()); Path scratchDir = new Path(baseDfsDir, "scratch"); - - // Create scratchdir with 777, so that user impersonation has no issues. - FileSystem.mkdirs(fs, scratchDir, FULL_PERM); + // Create root scratchdir with write-all (733) permission, so that user impersonation has no issues. + Utilities.createDirsWithPermission(hiveConf, scratchDir, WRITE_ALL_PERM, true); System.setProperty(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.toString()); hiveConf.setVar(ConfVars.SCRATCHDIR, scratchDir.toString()); diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index 241d315..e0b6558 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -388,7 +388,7 @@ public void testSessionScratchDirs() throws Exception { } /** - * Tests the creation of the root hdfs scratch dir, which should be writable by all (777). + * Tests the creation of the root HDFS scratch dir, which should be writable by all (733). 
* * @throws Exception */ @@ -410,7 +410,7 @@ public void testRootScratchDir() throws Exception { hs2Conn = getConnection(miniHS2.getJdbcURL(), userName, "password"); // FS FileSystem fs = miniHS2.getLocalFS(); - FsPermission expectedFSPermission = new FsPermission("777"); + FsPermission expectedFSPermission = new FsPermission((short)00733); // Verify scratch dir paths and permission // HDFS scratch dir scratchDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index d7227ed..324685b 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -515,7 +515,7 @@ private void createSessionDirs(String userName) throws IOException { */ private Path createRootHDFSDir(HiveConf conf) throws IOException { Path rootHDFSDirPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR)); - FsPermission expectedHDFSDirPermission = new FsPermission("777"); + FsPermission expectedHDFSDirPermission = new FsPermission((short)00733); FileSystem fs = rootHDFSDirPath.getFileSystem(conf); if (!fs.exists(rootHDFSDirPath)) { Utilities.createDirsWithPermission(conf, rootHDFSDirPath, expectedHDFSDirPermission, true); @@ -523,7 +523,7 @@ private Path createRootHDFSDir(HiveConf conf) throws IOException { FsPermission currentHDFSDirPermission = fs.getFileStatus(rootHDFSDirPath).getPermission(); LOG.debug("HDFS root scratch dir: " + rootHDFSDirPath + ", permission: " + currentHDFSDirPermission); - // If the root HDFS scratch dir already exists, make sure the permissions are 777. + // If the root HDFS scratch dir already exists, make sure the permissions are 733. 
if (!expectedHDFSDirPermission.equals(fs.getFileStatus(rootHDFSDirPath).getPermission())) { throw new RuntimeException("The root scratch dir: " + rootHDFSDirPath + " on HDFS should be writable. Current permissions are: " + currentHDFSDirPermission); @@ -650,7 +650,7 @@ private void setupAuth() { authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), conf, authenticator, authzContextBuilder.build()); - authorizerV2.applyAuthorizationConfigPolicy(conf); + authorizerV2.applyAuthorizationConfigPolicy(conf); } // create the create table grants with new config createTableGrants = CreateTableAutomaticGrant.create(conf);