diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index de5504498d..5bd8c0d877 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -138,7 +138,12 @@ public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPri
@Override
public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
throws HiveException, AuthorizationException {
- Path path = getDbLocation(db);
+ Path path;
+ try {
+ path = wh.getDefaultExternalDatabasePath(db.getName());
+ } catch (MetaException ex) {
+ throw hiveException(ex);
+ }
// extract drop privileges
DropPrivilegeExtractor privExtractor = new DropPrivilegeExtractor(readRequiredPriv,
diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml
index 67d8fb41d1..baa6b3db4c 100644
--- a/standalone-metastore/pom.xml
+++ b/standalone-metastore/pom.xml
@@ -45,6 +45,7 @@
${project.basedir}/src/test/resources
${project.build.directory}/tmp
${project.build.directory}/warehouse
+ ${project.build.directory}/external
file://
1
true
@@ -580,8 +581,10 @@
+
+
@@ -745,6 +748,7 @@
false
${test.tmp.dir}
${test.warehouse.scheme}${test.warehouse.dir}
+ ${test.warehouse.scheme}${test.warehouse.external.dir}
${log4j.conf.dir}
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 1327fa267f..feb856e097 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -31,6 +31,7 @@
import static org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.prependNotNullCatToDbName;
import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
@@ -77,6 +78,7 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.api.*;
@@ -198,7 +200,8 @@
public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStore.class);
public static final String PARTITION_NUMBER_EXCEED_LIMIT_MSG =
"Number of partitions scanned (=%d) on table '%s' exceeds limit (=%d). This is controlled on the metastore server by %s.";
-
+ private static final FsPermission EXTERNAL_DB_DIRECTORIES_PERMISSIONS
+ = FsPermission.createImmutable((short)0700);
// boolean that tells if the HiveMetaStore (remote) server is being used.
// Can be used to determine if the calls to metastore api (HMSHandler) are being made with
// embedded metastore or a remote one
@@ -747,6 +750,7 @@ private void createDefaultDB_core(RawStore ms) throws MetaException, InvalidObje
try {
ms.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
} catch (NoSuchObjectException e) {
+
Database db = new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null);
db.setOwnerName(PUBLIC);
@@ -1255,20 +1259,64 @@ private void create_database_core(RawStore ms, final Database db)
throw new InvalidObjectException("No such catalog " + db.getCatalogName());
}
Path dbPath = wh.determineDatabasePath(cat, db);
+ Path dbExternalPath = wh.getDefaultExternalDatabasePath(db.getName());
db.setLocationUri(dbPath.toString());
boolean success = false;
- boolean madeDir = false;
+ boolean madeManagedDir = false;
+ boolean madeExternalDir = false;
Map transactionalListenersResponses = Collections.emptyMap();
try {
firePreEvent(new PreCreateDatabaseEvent(db, this));
- if (!wh.isDir(dbPath)) {
- LOG.debug("Creating database path " + dbPath);
- if (!wh.mkdirs(dbPath)) {
- throw new MetaException("Unable to create database path " + dbPath +
- ", failed to create database " + db.getName());
+
+
+ try {
+ // Since this may be done as random user (if doAs=true) he may not have access
+ // to the managed directory. We run this as an admin user
+ madeManagedDir = UserGroupInformation.getLoginUser().doAs(
+ new PrivilegedExceptionAction<Boolean>() {
+ @Override
+ public Boolean run() throws MetaException {
+ if (!wh.isDir(dbPath)) {
+ LOG.info("Creating database path in managed directory " + dbPath);
+ if (!wh.mkdirs(dbPath)) {
+ throw new MetaException("Unable to create database managed path " + dbPath +
+ ", failed to create database " + db.getName());
+ }
+ return true;
+ }
+ return false;
+ }
+ });
+ if (madeManagedDir) {
+ LOG.info("Created database path in managed directory " + dbPath);
}
- madeDir = true;
+ } catch (IOException | InterruptedException e) {
+ throw new MetaException("Unable to create database managed directory " + dbPath +
+ ", failed to create database " + db.getName());
+ }
+
+ try {
+ madeExternalDir = UserGroupInformation.getCurrentUser().doAs(
+ new PrivilegedExceptionAction<Boolean>() {
+ @Override
+ public Boolean run() throws MetaException {
+ if (!wh.isDir(dbExternalPath)) {
+ LOG.info("Creating database path in external directory " + dbExternalPath);
+ return wh.mkdirs(dbExternalPath, EXTERNAL_DB_DIRECTORIES_PERMISSIONS);
+ }
+ return false;
+ }
+ });
+ if (madeExternalDir) {
+ LOG.info("Created database path in external directory " + dbExternalPath);
+ } else {
+ LOG.warn("Failed to create external path " + dbExternalPath + " for database " + db.getName()
+ + ". Access won't be allowed if the StorageBasedAuthorizationProvider is enabled ");
+ }
+ } catch (IOException | InterruptedException | UndeclaredThrowableException e) {
+ LOG.warn("Failed to create external path " + dbExternalPath + " for database " + db.getName()
+ + ". Access won't be allowed if the StorageBasedAuthorizationProvider is enabled ", e);
}
ms.openTransaction();
@@ -1285,8 +1333,37 @@ private void create_database_core(RawStore ms, final Database db)
} finally {
if (!success) {
ms.rollbackTransaction();
- if (madeDir) {
- wh.deleteDir(dbPath, true, db);
+
+ if (madeManagedDir) {
+ try {
+ UserGroupInformation.getLoginUser().doAs(
+ new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ wh.deleteDir(dbPath, true, db);
+ return null;
+ }
+ });
+ } catch (IOException | InterruptedException e) {
+ LOG.error("Couldn't delete managed directory " + dbPath + " after "
+ + "it was created for database " + db.getName(), e);
+ }
+ }
+
+ if (madeExternalDir) {
+ try {
+ UserGroupInformation.getCurrentUser().doAs(
+ new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ wh.deleteDir(dbExternalPath, true, db);
+ return null;
+ }
+ });
+ } catch (IOException | InterruptedException e) {
+ LOG.warn("Couldn't delete external directory " + dbExternalPath + " after "
+ + "it was created for database " + db.getName(), e);
+ }
}
}
@@ -1588,12 +1665,35 @@ private void drop_database_core(RawStore ms, String catName,
}
// Delete the data in the database
try {
- wh.deleteDir(new Path(db.getLocationUri()), true, db);
- } catch (Exception e) {
- LOG.error("Failed to delete database directory: " + db.getLocationUri() +
- " " + e.getMessage());
+ final Database dbFinal = db;
+ Boolean deleted = UserGroupInformation.getLoginUser().doAs(
+ new PrivilegedExceptionAction<Boolean>() {
+ @Override
+ public Boolean run() throws MetaException {
+ return wh.deleteDir(new Path(dbFinal.getLocationUri()), true, dbFinal);
+ }
+ });
+ if (!deleted) {
+ LOG.warn("Failed to delete database folder " + db.getLocationUri());
+ }
+ } catch (IOException | InterruptedException | UndeclaredThrowableException e) {
+ LOG.warn("Might have failed to delete database folder " + db.getLocationUri(), e);
}
// it is not a terrible thing even if the data is not deleted
+
+ try {
+ final Path externalPath = wh.getDefaultExternalDatabasePath(db.getName());
+ Boolean deleted = UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Boolean>() {
+ @Override public Boolean run() throws IOException, MetaException {
+ return wh.deleteDirIfEmpty(externalPath);
+ }
+ });
+ if (!deleted) {
+ LOG.warn("Failed to delete database external folder " + externalPath);
+ }
+ } catch (IOException | InterruptedException e) {
+ LOG.warn("Might have failed to delete database external folder for database " + db.getName(), e);
+ }
}
if (!listeners.isEmpty()) {
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
index da5a71cc64..dca9ccb726 100755
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -31,6 +31,7 @@
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.metastore.api.Catalog;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
@@ -318,6 +319,17 @@ public boolean mkdirs(Path f) throws MetaException {
return false;
}
+ public boolean mkdirs(Path f, FsPermission permission) throws MetaException {
+ FileSystem fs;
+ try {
+ fs = getFs(f);
+ return FileUtils.mkdir(fs, f, permission);
+ } catch (IOException e) {
+ MetaStoreUtils.logAndThrowMetaException(e);
+ }
+ return false;
+ }
+
public boolean renameDir(Path sourcePath, Path destPath, boolean needCmRecycle) throws MetaException {
try {
if (needCmRecycle) {
@@ -342,6 +354,16 @@ void addToChangeManagement(Path file) throws MetaException {
}
}
+ public boolean deleteDirIfEmpty(Path f) throws MetaException, IOException {
+ FileSystem fs = getFs(f);
+ if (FileUtils.isDirEmpty(fs, f)) {
+ return deleteDir(f, false, false, false);
+ } else {
+ LOG.info("Will not delete external directory " + f + " since it's not empty");
+ }
+ return true;
+ }
+
public boolean deleteDir(Path f, boolean recursive, Database db) throws MetaException {
return deleteDir(f, recursive, false, db);
}
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
index 963e12f9d8..02a4c12297 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/utils/FileUtils.java
@@ -26,14 +26,18 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
@@ -168,6 +172,20 @@ public static boolean mkdir(FileSystem fs, Path f) throws IOException {
return fs.mkdirs(f);
}
+ /**
+ * Creates the directory and all necessary parent directories.
+ * @param fs FileSystem to use
+ * @param f path to create.
+ * @param fsPermission permissions to use.
+ * @return true if directory created successfully. False otherwise, including if it exists.
+ * @throws IOException exception in creating the directory
+ */
+ public static boolean mkdir(FileSystem fs, Path f, FsPermission fsPermission) throws IOException {
+ LOG.info("Creating directory if it doesn't exist: " + f
+ + " , with permissions: " + fsPermission);
+ return fs.mkdirs(f, fsPermission);
+ }
+
/**
* Rename a file. Unlike {@link FileSystem#rename(Path, Path)}, if the destPath already exists
* and is a directory, this will NOT move the sourcePath into it. It will throw an IOException
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index cb32236d54..8783e94ab8 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.metastore;
+import java.io.FileNotFoundException;
import java.lang.reflect.Field;
import java.io.IOException;
import java.sql.Connection;
@@ -40,6 +41,7 @@
import java.util.concurrent.TimeUnit;
import com.google.common.collect.Sets;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hive.metastore.api.CreationMetadata;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
import org.apache.hadoop.hive.metastore.client.builder.TableBuilder;
@@ -47,6 +49,7 @@
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.apache.hadoop.hive.metastore.utils.FileUtils;
import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
+import org.apache.hadoop.security.UserGroupInformation;
import org.datanucleus.api.jdo.JDOPersistenceManager;
import org.datanucleus.api.jdo.JDOPersistenceManagerFactory;
import org.junit.Assert;
@@ -1069,6 +1072,49 @@ public void testDatabaseLocationWithPermissionProblems() throws Exception {
assertTrue("Database creation succeeded even with permission problem", createFailed);
}
+ @Test
+ public void testExternalDirectory() throws Exception{
+ String externalDirString = MetastoreConf.getVar(conf, ConfVars.WAREHOUSE_EXTERNAL);
+ Path externalDir = new Path(externalDirString);
+ silentDropDatabase(TEST_DB1_NAME);
+
+ String dbLocation =
+ MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/test/_testDB_create_";
+
+ String dbExternalLocation = externalDirString + "/testdb1.db";
+
+
+ FileSystem fs = FileSystem.get(new Path(dbLocation).toUri(), conf);
+ fs.mkdirs(
+ new Path(MetastoreConf.getVar(conf, ConfVars.WAREHOUSE)),
+ new FsPermission((short) 0700));
+
+ fs.mkdirs(externalDir, new FsPermission((short) 0777));
+
+ Database db = new DatabaseBuilder()
+ .setName(TEST_DB1_NAME)
+ .setLocation(dbLocation)
+ .build(conf);
+ client.createDatabase(db);
+ FileStatus fileStatus = fs.getFileStatus(new Path(dbExternalLocation));
+
+ assertTrue("External folder should have been created", fileStatus.isDirectory());
+ assertEquals("External folder should have the right permissions", new FsPermission((short) 0700),
+ fileStatus.getPermission());
+ assertEquals("External folder should be owned by the right username",
+ UserGroupInformation.getCurrentUser().getShortUserName(), fileStatus.getOwner());
+ client.dropDatabase(db.getName());
+
+ try {
+ fs.getFileStatus(new Path(dbExternalLocation));
+ fail("External directory should have been deleted");
+ } catch (FileNotFoundException e ) {}
+ finally {
+ fs.delete(new Path(MetastoreConf.getVar(conf, ConfVars.WAREHOUSE) + "/test"), true);
+ fs.delete(new Path(MetastoreConf.getVar(conf, ConfVars.WAREHOUSE_EXTERNAL) + "/test"), true);
+ }
+ }
+
@Test
public void testDatabaseLocation() throws Throwable {
try {
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestAddPartitions.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestAddPartitions.java
index a15f5ea045..54302caa6f 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestAddPartitions.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/client/TestAddPartitions.java
@@ -523,7 +523,7 @@ public void testAddPartitionForExternalTableNullLocation() throws Exception {
client.getPartition(DB_NAME, tableName, Lists.newArrayList(DEFAULT_YEAR_VALUE));
Assert.assertNotNull(resultPart);
Assert.assertNotNull(resultPart.getSd());
- String defaultTableLocation = metaStore.getWarehouseRoot() + "/" + DB_NAME + ".db/" + tableName;
+ String defaultTableLocation = metaStore.getExternalWarehouseRoot() + "/" + DB_NAME + ".db/" + tableName;
String defaulPartitionLocation = defaultTableLocation + "/year=2017";
Assert.assertEquals(defaulPartitionLocation, resultPart.getSd().getLocation());
}
@@ -1197,7 +1197,7 @@ public void testAddPartitionsForExternalTableNullLocation() throws Exception {
Lists.newArrayList("year=2017", "year=2018"));
Assert.assertNotNull(resultParts);
Assert.assertEquals(2, resultParts.size());
- String defaultTableLocation = metaStore.getWarehouseRoot() + "/" + DB_NAME + ".db/" + tableName;
+ String defaultTableLocation = metaStore.getExternalWarehouseRoot() + "/" + DB_NAME + ".db/" + tableName;
String defaultPartLocation1 = defaultTableLocation + "/year=2017";
String defaultPartLocation2 = defaultTableLocation + "/year=2018";
if (resultParts.get(0).getValues().get(0).equals("2017")) {
diff --git a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/minihms/AbstractMetaStoreService.java b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/minihms/AbstractMetaStoreService.java
index 709085d71f..5e9fdca35c 100644
--- a/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/minihms/AbstractMetaStoreService.java
+++ b/standalone-metastore/src/test/java/org/apache/hadoop/hive/metastore/minihms/AbstractMetaStoreService.java
@@ -113,6 +113,16 @@ public Path getWarehouseRoot() throws MetaException {
return warehouse.getWhRoot();
}
+ /**
+ * Returns the External MetaStore Warehouse root directory name.
+ *
+ * @return The external warehouse root directory
+ * @throws MetaException IO failure
+ */
+ public Path getExternalWarehouseRoot() throws MetaException {
+ return warehouse.getWhRootExternal();
+ }
+
/**
* Check if a path exists.
*