diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
index 8487e3a915e..f1e86694d65 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
@@ -19,7 +19,6 @@
 
 package org.apache.hive.hcatalog.cli.SemanticAnalysis;
 
-import java.io.Serializable;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -123,7 +122,8 @@ protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
   protected void authorize(Privilege[] inputPrivs, Privilege[] outputPrivs)
     throws AuthorizationException, SemanticException {
     try {
-      getAuthProvider().authorize(inputPrivs, outputPrivs);
+      getAuthProvider().authorizeDbLevelOperations(inputPrivs, outputPrivs,
+          null, null);
     } catch (HiveException ex) {
       throw new SemanticException(ex);
     }
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
index 86d9a1836ca..46d1d9e6dde 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
@@ -19,10 +19,13 @@
 
 package org.apache.hive.hcatalog.storagehandler;
 
+import java.util.Collection;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -77,8 +80,8 @@ public void setAuthenticator(HiveAuthenticationProvider authenticator) {
    * org.apache.hadoop.hive.ql.security.authorization.Privilege[])
    */
   @Override
-  public void authorize(Privilege[] readRequiredPriv,
-      Privilege[] writeRequiredPriv) throws HiveException,
+  public void authorizeDbLevelOperations(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv,
+      Collection<ReadEntity> inputs, Collection<WriteEntity> outputs) throws HiveException,
       AuthorizationException {
   }
 
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
index 3fdacac6c56..77c7c225a06 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
@@ -19,8 +19,11 @@
 package org.apache.hadoop.hive.ql.security;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -116,7 +119,8 @@ public void init(Configuration conf) throws HiveException {
   }
 
   @Override
-  public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+  public void authorizeDbLevelOperations(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv,
+      Collection<ReadEntity> inputs, Collection<WriteEntity> outputs)
       throws HiveException, AuthorizationException {
     debugLog("DHMAP.authorize " +
         "read:" + debugPrivPrint(readRequiredPriv) +
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
index 7c8affb3b56..9117b0efafd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/BitSetCheckedAuthorizationProvider.java
@@ -28,6 +28,8 @@
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -63,8 +65,8 @@ public static BitSetChecker getBitSetChecker(Privilege[] inputRequiredPriv,
   }
 
   @Override
-  public void authorize(Privilege[] inputRequiredPriv,
-      Privilege[] outputRequiredPriv) throws HiveException, AuthorizationException {
+  public void authorizeDbLevelOperations(Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv,
+      Collection<ReadEntity> inputs, Collection<WriteEntity> outputs) throws HiveException, AuthorizationException {
 
     BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv,
         outputRequiredPriv);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
index 0e644b4087d..25c1bd6dd71 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.Collection;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -49,12 +52,15 @@
    *          a list of privileges needed for inputs.
    * @param writeRequiredPriv
    *          a list of privileges needed for outputs.
+   * @param inputs
+   *          input entities to be authorized
+   * @param outputs
+   *          output entities to be authorized
    * @throws HiveException
    * @throws AuthorizationException
    */
-  public void authorize(Privilege[] readRequiredPriv,
-      Privilege[] writeRequiredPriv) throws HiveException,
-      AuthorizationException;
+  void authorizeDbLevelOperations(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv,
+      Collection<ReadEntity> inputs, Collection<WriteEntity> outputs) throws HiveException, AuthorizationException;
 
   /**
    * Authorization privileges against a database object.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
index 895f34f3cf6..44fbd983b41 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/MetaStoreAuthzAPIAuthorizerEmbedOnly.java
@@ -18,12 +18,15 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.Collection;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.IHMSHandler;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -45,7 +48,8 @@ public void init(Configuration conf) throws HiveException {
   }
 
   @Override
-  public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+  public void authorizeDbLevelOperations(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv,
+      Collection<ReadEntity> inputs, Collection<WriteEntity> outputs)
       throws HiveException, AuthorizationException {
     // not authorized by this implementation, ie operation is allowed by it
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index 2a52e8354bc..076f95ecbee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -21,15 +21,18 @@
 import java.io.IOException;
 import java.security.AccessControlException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
 
 import javax.security.auth.login.LoginException;
 
+import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.IHMSHandler;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
-import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -108,7 +111,8 @@ public void init(Configuration conf) throws HiveException {
   }
 
   @Override
-  public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+  public void authorizeDbLevelOperations(Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv,
+      Collection<ReadEntity> inputs, Collection<WriteEntity> outputs)
       throws HiveException, AuthorizationException {
     // Currently not used in hive code-base, but intended to authorize actions
     // that are directly user-level. As there's no storage based aspect to this,
@@ -132,6 +136,15 @@ public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPri
     try {
       initWh();
       root = wh.getWhRoot();
+      // When we have some path in outputs, we should check access on that path, usually happens when
+      // we have HiveOperation.CREATEDATABASE query with some location
+      // or we have HiveOperation.ALTERDATABASE_LOCATION
+      for (WriteEntity writeEntity : outputs) {
+        if (WriteEntity.WriteType.PATH_WRITE.equals(writeEntity.getWriteType())) {
+          root = new Path(writeEntity.getName());
+          break;
+        }
+      }
       authorize(root, readRequiredPriv, writeRequiredPriv);
     } catch (MetaException ex) {
       throw hiveException(ex);
@@ -141,6 +154,13 @@ public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPri
   @Override
   public void authorize(Database db, Privilege[] readRequiredPriv,
       Privilege[] writeRequiredPriv) throws HiveException, AuthorizationException {
+
+    try {
+      initWh();
+    } catch (MetaException ex) {
+      throw hiveException(ex);
+    }
+
     Path path = getDbLocation(db);
 
     // extract drop privileges
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java
index bbff049411d..e14e73bfb83 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java
@@ -26,6 +26,7 @@
 import java.util.Set;
 
 import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -61,15 +62,17 @@ static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, SessionS
     Hive db = sem.getDb();
     HiveAuthorizationProvider authorizer = ss.getAuthorizer();
 
-    authorizeOperation(op, sem, db, authorizer);
+    authorizeOperation(op, sem, inputs, outputs, db, authorizer);
     authorizeOutputs(op, outputs, db, authorizer);
     authorizeInputs(op, sem, inputs, authorizer);
   }
 
-  private static void authorizeOperation(HiveOperation op, BaseSemanticAnalyzer sem, Hive db,
+  private static void authorizeOperation(HiveOperation op, BaseSemanticAnalyzer sem, Set<ReadEntity> inputs,
+      Set<WriteEntity> outputs, Hive db,
       HiveAuthorizationProvider authorizer) throws HiveException {
-    if (op.equals(HiveOperation.CREATEDATABASE)) {
-      authorizer.authorize(op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
+    if (op.equals(HiveOperation.CREATEDATABASE) || op.equals(HiveOperation.ALTERDATABASE_LOCATION)) {
+      authorizer.authorizeDbLevelOperations(op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges(),
+          inputs, outputs);
     } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
       authorizer.authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null,
           HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
diff --git a/ql/src/test/queries/clientnegative/authorization_sba_alter_db_loc.q b/ql/src/test/queries/clientnegative/authorization_sba_alter_db_loc.q
new file mode 100644
index 00000000000..5b9a30111bf
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/authorization_sba_alter_db_loc.q
@@ -0,0 +1,14 @@
+set hive.security.authorization.enabled=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
+
+-- try to alter database location where we don't have write permissions and this fails
+-- In logs, it generates exception like below.
+
+-- org.apache.hadoop.hive.ql.metadata.HiveException: java.security.AccessControlException:
+-- Permission denied: user=hive_test_user,
+-- path="file:/Users/.../hive/itests/qtest/target/tmp/databases_no_write_permissions":schaurasia:staff:dr-xr-xr-x
+
+dfs -mkdir -p ${system:test.tmp.dir}/databases_no_write_permissions;
+dfs -chmod 555 ${system:test.tmp.dir}/databases_no_write_permissions;
+
+alter database default set location '${system:test.tmp.dir}/databases_no_write_permissions/d1';
diff --git a/ql/src/test/queries/clientnegative/authorization_sba_create_db_with_loc.q b/ql/src/test/queries/clientnegative/authorization_sba_create_db_with_loc.q
new file mode 100644
index 00000000000..4ceef6b7363
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/authorization_sba_create_db_with_loc.q
@@ -0,0 +1,14 @@
+set hive.security.authorization.enabled=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
+
+-- try to create database where we don't have write permissions and this fails
+-- In logs, it generates exception like below.
+
+-- org.apache.hadoop.hive.ql.metadata.HiveException: java.security.AccessControlException:
+-- Permission denied: user=hive_test_user,
+-- path="file:/Users/.../hive/itests/qtest/target/tmp/databases_no_write_permissions":schaurasia:staff:dr-xr-xr-x
+
+dfs -mkdir -p ${system:test.tmp.dir}/databases_no_write_permissions;
+dfs -chmod 555 ${system:test.tmp.dir}/databases_no_write_permissions;
+
+create database d1 location '${system:test.tmp.dir}/databases_no_write_permissions/d1';
diff --git a/ql/src/test/results/clientnegative/authorization_sba_alter_db_loc.q.out b/ql/src/test/results/clientnegative/authorization_sba_alter_db_loc.q.out
new file mode 100644
index 00000000000..0b8182a69b1
--- /dev/null
+++ b/ql/src/test/results/clientnegative/authorization_sba_alter_db_loc.q.out
@@ -0,0 +1 @@
+#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientnegative/authorization_sba_create_db_with_loc.q.out b/ql/src/test/results/clientnegative/authorization_sba_create_db_with_loc.q.out
new file mode 100644
index 00000000000..0b8182a69b1
--- /dev/null
+++ b/ql/src/test/results/clientnegative/authorization_sba_create_db_with_loc.q.out
@@ -0,0 +1 @@
+#### A masked pattern was here ####