diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index cc529d5..134b410 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -602,7 +602,6 @@ public class HiveConf extends Configuration {
SEMANTIC_ANALYZER_HOOK("hive.semantic.analyzer.hook", ""),
HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false),
- HIVE_METASTORE_AUTHORIZATION_ENABLED("hive.security.metastore.authorization.enabled", false),
HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
"org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider"),
HIVE_AUTHENTICATOR_MANAGER("hive.security.authenticator.manager",
diff --git a/conf/hive-default.xml.template b/conf/hive-default.xml.template
index e9aa1cb..26dce19 100644
--- a/conf/hive-default.xml.template
+++ b/conf/hive-default.xml.template
@@ -1250,12 +1250,6 @@
- hive.security.metastore.authorization.enabled
- false
- enable or disable hive metastore-side authorization
-
-
-
hive.security.authorization.manager
org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider
the hive client authorization manager class name.
@@ -1266,7 +1260,7 @@
hive.security.metastore.authorization.manager
org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider
- the hive client authorization manager class name to be used in the metastore for authorization.
+ authorization manager class name to be used in the metastore for authorization.
The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.
@@ -1281,7 +1275,7 @@
hive.security.metastore.authenticator.manager
org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator
- hive client authenticator manager class name to be used in the metastore for authentication.
+ authenticator manager class name to be used in the metastore for authentication.
The user defined authenticator should implement interface org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider.
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index c80aed1..eccc876 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -59,7 +59,6 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -87,6 +86,7 @@ import org.apache.hadoop.hive.metastore.api.Type;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
import org.apache.hadoop.hive.metastore.api.UnknownTableException;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
@@ -401,7 +401,7 @@ public class HiveMetaStore extends ThriftHiveMetastore {
} catch (NoSuchObjectException e) {
ms.createDatabase(
new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
- getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
+ wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
}
HMSHandler.createDefaultDB = true;
}
@@ -519,22 +519,13 @@ public class HiveMetaStore extends ThriftHiveMetastore {
return counters;
}
- private static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
-
- private Path getDefaultDatabasePath(String dbName) throws MetaException {
- if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
- return wh.getWhRoot();
- }
- return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
- }
-
private void create_database_core(RawStore ms, final Database db)
throws AlreadyExistsException, InvalidObjectException, MetaException {
if (!validateName(db.getName())) {
throw new InvalidObjectException(db.getName() + " is not a valid database name");
}
if (null == db.getLocationUri()) {
- db.setLocationUri(getDefaultDatabasePath(db.getName()).toString());
+ db.setLocationUri(wh.getDefaultDatabasePath(db.getName()).toString());
} else {
db.setLocationUri(wh.getDnsPath(new Path(db.getLocationUri())).toString());
}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index b35efc0..d857004 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -39,12 +39,12 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -66,6 +66,8 @@ public class MetaStoreUtils {
public static final String DEFAULT_DATABASE_NAME = "default";
public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";
+ public static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
+
/**
* printStackTrace
*
@@ -1029,4 +1031,5 @@ public class MetaStoreUtils {
throw new MetaException(rawStoreClassName + " class not found");
}
}
+
}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
index 9f3b9ff..b2cd839 100755
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hive.metastore;
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DATABASE_WAREHOUSE_SUFFIX;
import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
import java.io.FileNotFoundException;
@@ -164,6 +165,14 @@ public class Warehouse {
return new Path(db.getLocationUri());
}
+ public Path getDefaultDatabasePath(String dbName) throws MetaException {
+ if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+ return getWhRoot();
+ }
+ return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
+ }
+
+
public Path getTablePath(Database db, String tableName)
throws MetaException {
return getDnsPath(new Path(getDatabasePath(db), tableName.toLowerCase()));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
index a4607dd..631417f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/HiveMetastoreAuthenticationProvider.java
@@ -22,8 +22,11 @@ import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
/**
- * HiveAuthenticationProvider is an interface for authentication. The
- * implementation should return userNames and groupNames.
+ * HiveMetastoreAuthenticationProvider is an interface extension
+ * from HiveAuthenticationProvider for authentication from the
+ * metastore side. The implementation should return userNames
+ * and groupNames, and take care that if the metastore is running
+ * a particular command as a user, it returns that user.
*/
public interface HiveMetastoreAuthenticationProvider extends HiveAuthenticationProvider{
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
index 29e27c6..dc56e2e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationPreEventListener.java
@@ -46,37 +46,35 @@ import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;
+/**
+ * AuthorizationPreEventListener : A MetaStorePreEventListener that
+ * performs authorization/authentication checks on the metastore side.
+ *
+ * Note that this can only perform authorization checks on defined
+ * metastore PreEventContexts, such as the adding/dropping and altering
+ * of databases, tables and partitions.
+ */
public class AuthorizationPreEventListener extends MetaStorePreEventListener {
public static final Log LOG = LogFactory.getLog(
AuthorizationPreEventListener.class);
private static HiveConf conf;
- private static boolean doAuth;
private static HiveMetastoreAuthorizationProvider authorizer;
private static HiveMetastoreAuthenticationProvider authenticator;
public AuthorizationPreEventListener(Configuration config) throws HiveException {
super(config);
- conf = new HiveConf(config, AuthorizationPreEventListener.class);
-
- doAuth = conf.getBoolVar(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED);
-
- if (doAuth) {
- authenticator = (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(
- conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
- authorizer = (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
- conf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);
- }
+ authenticator = (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(
+ config, HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
+ authorizer = (HiveMetastoreAuthorizationProvider) HiveUtils.getAuthorizeProviderManager(
+ config, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER, authenticator);
}
@Override
public void onEvent(PreEventContext context) throws MetaException, NoSuchObjectException,
InvalidOperationException {
- if (!doAuth) {
- return; // Metastore-side auth was not turned on, we simply return.
- }
authenticator.setMetaStoreHandler(context.getHandler());
authorizer.setMetaStoreHandler(context.getHandler());
@@ -122,9 +120,9 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
HiveOperation.CREATEDATABASE.getInputRequiredPrivileges(),
HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges());
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -135,9 +133,9 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
HiveOperation.DROPDATABASE.getInputRequiredPrivileges(),
HiveOperation.DROPDATABASE.getOutputRequiredPrivileges());
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -148,9 +146,9 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
HiveOperation.CREATETABLE.getInputRequiredPrivileges(),
HiveOperation.CREATETABLE.getOutputRequiredPrivileges());
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -161,9 +159,9 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
HiveOperation.DROPTABLE.getInputRequiredPrivileges(),
HiveOperation.DROPTABLE.getOutputRequiredPrivileges());
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -174,9 +172,9 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
null,
new Privilege[]{Privilege.ALTER_METADATA});
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -188,11 +186,11 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
HiveOperation.ALTERTABLE_ADDPARTS.getInputRequiredPrivileges(),
HiveOperation.ALTERTABLE_ADDPARTS.getOutputRequiredPrivileges());
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (NoSuchObjectException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -204,11 +202,11 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
HiveOperation.ALTERTABLE_DROPPARTS.getInputRequiredPrivileges(),
HiveOperation.ALTERTABLE_DROPPARTS.getOutputRequiredPrivileges());
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (NoSuchObjectException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
@@ -220,16 +218,15 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
null,
new Privilege[]{Privilege.ALTER_METADATA});
} catch (AuthorizationException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (NoSuchObjectException e) {
- throw new InvalidOperationException(e.getMessage());
+ throw invalidOperationException(e);
} catch (HiveException e) {
- throw new MetaException(e.getMessage());
+ throw metaException(e);
}
}
-
- public Table getTableFromApiTable(org.apache.hadoop.hive.metastore.api.Table apiTable) {
+ private Table getTableFromApiTable(org.apache.hadoop.hive.metastore.api.Table apiTable) {
org.apache.hadoop.hive.metastore.api.Table tTable = apiTable.deepCopy();
if (tTable.getTableType() == null){
// TableType specified was null, we need to figure out what type it was.
@@ -247,7 +244,7 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
return tbl;
}
- public Partition getPartitionFromApiPartition(
+ private Partition getPartitionFromApiPartition(
org.apache.hadoop.hive.metastore.api.Partition mapiPart,
PreEventContext context) throws HiveException, NoSuchObjectException, MetaException {
org.apache.hadoop.hive.metastore.api.Partition tPart = mapiPart.deepCopy();
@@ -259,5 +256,16 @@ public class AuthorizationPreEventListener extends MetaStorePreEventListener {
return new Partition(getTableFromApiTable(t),tPart);
}
+ private InvalidOperationException invalidOperationException(Exception e) {
+ InvalidOperationException ex = new InvalidOperationException(e.getMessage());
+ ex.initCause(e);
+ return ex;
+ }
+
+ private MetaException metaException(HiveException e) {
+ MetaException ex = new MetaException(e.getMessage());
+ ex.initCause(e);
+ return ex;
+ }
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index ab4bc05..da166e4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hive.ql.security.authorization;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
-
import java.io.FileNotFoundException;
import java.io.IOException;
import java.security.AccessControlException;
@@ -46,7 +44,6 @@ import org.apache.hadoop.hive.ql.metadata.Table;
public class StorageBasedAuthorizationProvider extends HiveAuthorizationProviderBase
implements HiveMetastoreAuthorizationProvider {
- private static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
private Warehouse wh;
@Override
@@ -74,10 +71,6 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
@Override
public void authorize(Database db, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
throws HiveException, AuthorizationException {
- if (db == null) {
- return;
- }
-
Path path = getDbLocation(db);
authorize(path, readRequiredPriv, writeRequiredPriv);
}
@@ -86,20 +79,16 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
public void authorize(Table table, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
throws HiveException, AuthorizationException {
- if (table == null) {
- return;
- }
-
Path path = null;
try {
- if (table.getTTable().getSd().getLocation() == null
- || table.getTTable().getSd().getLocation().isEmpty()) {
+ String location = table.getTTable().getSd().getLocation();
+ if (location == null || location.isEmpty()) {
path = wh.getTablePath(hive_db.getDatabase(table.getDbName()), table.getTableName());
} else {
- path = table.getPath();
+ path = new Path(location);
}
} catch (MetaException ex) {
- throw new HiveException(ex);
+ throw hiveException(ex);
}
authorize(path, readRequiredPriv, writeRequiredPriv);
@@ -108,17 +97,14 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
@Override
public void authorize(Partition part, Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
throws HiveException, AuthorizationException {
- if (part == null) {
- return;
- }
authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv);
}
- public void authorize(Table table, Partition part, Privilege[] readRequiredPriv,
+ private void authorize(Table table, Partition part, Privilege[] readRequiredPriv,
Privilege[] writeRequiredPriv)
throws HiveException, AuthorizationException {
- if (part == null || part.getLocation() == null) {
+ if (part.getLocation() == null) {
authorize(table, readRequiredPriv, writeRequiredPriv);
} else {
authorize(part.getPartitionPath(), readRequiredPriv, writeRequiredPriv);
@@ -133,10 +119,6 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
// living in different files, so we do simple partition-auth and ignore
// the columns parameter.
- if (part == null) {
- return;
- }
-
authorize(part.getTable(), part, readRequiredPriv, writeRequiredPriv);
}
@@ -215,11 +197,11 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
checkPermissions(getConf(), path, actions);
} catch (AccessControlException ex) {
- throw new AuthorizationException(ex);
+ throw authorizationException(ex);
} catch (LoginException ex) {
- throw new AuthorizationException(ex);
+ throw authorizationException(ex);
} catch (IOException ex) {
- throw new HiveException(ex);
+ throw hiveException(ex);
}
}
@@ -273,7 +255,7 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
return;
} catch (org.apache.hadoop.fs.permission.AccessControlException ace) {
// Older hadoop version will throw this @deprecated Exception.
- throw new AccessControlException(ace.getMessage());
+ throw accessControlException(ace);
}
final FsPermission dirPerms = stat.getPermission();
@@ -298,25 +280,36 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
}
}
- private Path getDefaultDatabasePath(String dbName) throws MetaException {
- if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
- return wh.getWhRoot();
- }
- return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
- }
-
protected Path getDbLocation(Database db) throws HiveException {
try {
String location = db.getLocationUri();
if (location == null) {
- return getDefaultDatabasePath(db.getName());
+ return wh.getDefaultDatabasePath(db.getName());
} else {
return wh.getDnsPath(wh.getDatabasePath(db));
}
} catch (MetaException ex) {
- throw new HiveException(ex.getMessage());
+ throw hiveException(ex);
}
}
+ private HiveException hiveException(Exception e) {
+ HiveException ex = new HiveException(e.getMessage());
+ ex.initCause(e);
+ return ex;
+ }
+
+ private AuthorizationException authorizationException(Exception e) {
+ AuthorizationException ex = new AuthorizationException(e.getMessage());
+ ex.initCause(e);
+ return ex;
+ }
+
+ private static AccessControlException accessControlException(
+ org.apache.hadoop.fs.permission.AccessControlException e) {
+ AccessControlException ace = new AccessControlException(e.getMessage());
+ ace.initCause(e);
+ return ace;
+ }
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java b/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
index 55e07f5..535cdbe 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/TestAuthorizationPreEventListener.java
@@ -18,6 +18,8 @@
package org.apache.hadoop.hive.ql.security;
+import java.io.IOException;
+import java.net.ServerSocket;
import java.util.ArrayList;
import java.util.List;
@@ -41,7 +43,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
* {@link org.apache.hadoop.hive.metastore.MetaStorePreEventListener}
*/
public class TestAuthorizationPreEventListener extends TestCase {
- private static final String msPort = "20001";
+ private static String msPort;
private HiveConf clientHiveConf;
private HiveMetaStoreClient msc;
private Driver driver;
@@ -63,9 +65,11 @@ public class TestAuthorizationPreEventListener extends TestCase {
protected void setUp() throws Exception {
super.setUp();
+
+ msPort = getFreeAvailablePort();
+
System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
AuthorizationPreEventListener.class.getName());
- System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED.varname,"true");
System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
DummyHiveMetastoreAuthorizationProvider.class.getName());
System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
@@ -91,6 +95,14 @@ public class TestAuthorizationPreEventListener extends TestCase {
driver = new Driver(clientHiveConf);
}
+ private static String getFreeAvailablePort() throws IOException {
+ ServerSocket socket = new ServerSocket(0);
+ socket.setReuseAddress(true);
+ int port = socket.getLocalPort();
+ socket.close();
+ return "" + port;
+ }
+
@Override
protected void tearDown() throws Exception {
super.tearDown();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java b/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java
index 8433f8f..d8ee026 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/TestDefaultHiveMetastoreAuthorizationProvider.java
@@ -18,7 +18,10 @@
package org.apache.hadoop.hive.ql.security;
+import java.io.IOException;
+import java.net.ServerSocket;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
import junit.framework.TestCase;
@@ -28,12 +31,18 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener;
import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveMetastoreAuthorizationProvider;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.security.UserGroupInformation;
@@ -48,7 +57,7 @@ import org.apache.hadoop.security.UserGroupInformation;
* turn on server-side auth.
*/
public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase {
- private static final String msPort = "20001";
+ private static String msPort;
private HiveConf clientHiveConf;
private HiveMetaStoreClient msc;
private Driver driver;
@@ -71,9 +80,11 @@ public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase {
protected void setUp() throws Exception {
super.setUp();
+
+ msPort = getFreeAvailablePort();
+
System.setProperty(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname,
AuthorizationPreEventListener.class.getName());
- System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_ENABLED.varname,"true");
System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER.varname,
DefaultHiveMetastoreAuthorizationProvider.class.getName());
System.setProperty(HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER.varname,
@@ -103,6 +114,14 @@ public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase {
driver = new Driver(clientHiveConf);
}
+ private static String getFreeAvailablePort() throws IOException {
+ ServerSocket socket = new ServerSocket(0);
+ socket.setReuseAddress(true);
+ int port = socket.getLocalPort();
+ socket.close();
+ return "" + port;
+ }
+
@Override
protected void tearDown() throws Exception {
super.tearDown();
@@ -137,6 +156,35 @@ public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase {
assertEquals(1,ret.getResponseCode());
// failure from not having permissions to create table
+ ArrayList<FieldSchema> fields = new ArrayList<FieldSchema>(2);
+ fields.add(new FieldSchema("a", serdeConstants.STRING_TYPE_NAME, ""));
+
+ Table ttbl = new Table();
+ ttbl.setDbName(dbName);
+ ttbl.setTableName(tblName);
+ StorageDescriptor sd = new StorageDescriptor();
+ ttbl.setSd(sd);
+ sd.setCols(fields);
+ sd.setParameters(new HashMap<String, String>());
+ sd.getParameters().put("test_param_1", "Use this for comments etc");
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(ttbl.getTableName());
+ sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+ sd.getSerdeInfo().getParameters().put(
+ org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
+ sd.getSerdeInfo().setSerializationLib(
+ org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+ ttbl.setPartitionKeys(new ArrayList<FieldSchema>());
+
+ MetaException me = null;
+ try {
+ msc.createTable(ttbl);
+ } catch (MetaException e){
+ me = e;
+ }
+ assertNotNull(me);
+ assertTrue(me.getMessage().indexOf("No privilege") != -1);
+
driver.run("grant create on database "+dbName+" to user "+userName);
driver.run("use " + dbName);
@@ -161,10 +209,39 @@ public class TestDefaultHiveMetastoreAuthorizationProvider extends TestCase {
assertEquals(1,ret.getResponseCode());
+ ttbl.setTableName(tblName+"mal");
+ me = null;
+ try {
+ msc.createTable(ttbl);
+ } catch (MetaException e){
+ me = e;
+ }
+ assertNotNull(me);
+ assertTrue(me.getMessage().indexOf("No privilege") != -1);
ret = driver.run("alter table "+tblName+" add partition (b='2011')");
assertEquals(1,ret.getResponseCode());
+ List<String> ptnVals = new ArrayList<String>();
+ ptnVals.add("b=2011");
+ Partition tpart = new Partition();
+ tpart.setDbName(dbName);
+ tpart.setTableName(tblName);
+ tpart.setValues(ptnVals);
+ tpart.setParameters(new HashMap<String, String>());
+ tpart.setSd(tbl.getSd().deepCopy());
+ tpart.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo().deepCopy());
+ tpart.getSd().setLocation(tbl.getSd().getLocation() + "/tpart");
+
+ me = null;
+ try {
+ msc.add_partition(tpart);
+ } catch (MetaException e){
+ me = e;
+ }
+ assertNotNull(me);
+ assertTrue(me.getMessage().indexOf("No privilege") != -1);
+
InjectableDummyAuthenticator.injectMode(false);
ret = driver.run("alter table "+tblName+" add partition (b='2011')");