diff --git common/src/java/org/apache/hadoop/hive/conf/Constants.java common/src/java/org/apache/hadoop/hive/conf/Constants.java
index 7b2c234600..20b2ad3060 100644
--- common/src/java/org/apache/hadoop/hive/conf/Constants.java
+++ common/src/java/org/apache/hadoop/hive/conf/Constants.java
@@ -77,4 +77,6 @@
 
   /** A named lock is acquired prior to executing the query; enabling to run queries in parallel which might interfere with eachother. */
   public static final String HIVE_QUERY_EXCLUSIVE_LOCK = "hive.query.exclusive.lock";
+
+  public static final String WRITE_ENTITY_PATH = "WRITE_ENTITY_PATH";
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index 2a52e8354b..b621c22edf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -26,6 +26,7 @@
 
 import javax.security.auth.login.LoginException;
 
+import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.IHMSHandler;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
@@ -132,6 +133,11 @@ public void authorize(Privilege[] readRequiredPriv, Privilege[] writeRequiredPri
     try {
       initWh();
       root = wh.getWhRoot();
+      // when we have custom location in CREATEDATABASE query (create database db1 location 'some_location')
+      // we should check for access on that location
+      if (getConf().get(Constants.WRITE_ENTITY_PATH) != null) {
+        root = new Path(getConf().get(Constants.WRITE_ENTITY_PATH));
+      }
       authorize(root, readRequiredPriv, writeRequiredPriv);
     } catch (MetaException ex) {
       throw hiveException(ex);
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java
index bbff049411..2a017983db 100644
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV1.java
@@ -26,6 +26,7 @@
 import java.util.Set;
 
 import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.hooks.Entity;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -61,15 +62,26 @@ static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, SessionS
     Hive db = sem.getDb();
     HiveAuthorizationProvider authorizer = ss.getAuthorizer();
 
-    authorizeOperation(op, sem, db, authorizer);
+    authorizeOperation(op, sem, outputs, db, authorizer);
     authorizeOutputs(op, outputs, db, authorizer);
     authorizeInputs(op, sem, inputs, authorizer);
   }
 
-  private static void authorizeOperation(HiveOperation op, BaseSemanticAnalyzer sem, Hive db,
+  private static void authorizeOperation(HiveOperation op, BaseSemanticAnalyzer sem, Set<WriteEntity> outputs, Hive db,
       HiveAuthorizationProvider authorizer) throws HiveException {
     if (op.equals(HiveOperation.CREATEDATABASE)) {
+      // if PATH_WRITE is present for CREATEDATABASE, this means we have location present in the query.
+      // e.g. create database db1 location 'some_location'
+      // SBA should check for access on this custom location
+      for (WriteEntity writeEntity : outputs) {
+        if (WriteEntity.WriteType.PATH_WRITE.equals(writeEntity.getWriteType())) {
+          // writeEntity.getName() gives fully qualified location
+          SessionState.get().getConf().set(Constants.WRITE_ENTITY_PATH, writeEntity.getName());
+          break;
+        }
+      }
       authorizer.authorize(op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
+      SessionState.get().getConf().unset(Constants.WRITE_ENTITY_PATH);
     } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT) || op.equals(HiveOperation.CREATETABLE)) {
       authorizer.authorize(db.getDatabase(SessionState.get().getCurrentDatabase()), null,
           HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
diff --git ql/src/test/queries/clientnegative/authorization_sba_create_db_with_loc.q ql/src/test/queries/clientnegative/authorization_sba_create_db_with_loc.q
new file mode 100644
index 0000000000..f12cf11b5e
--- /dev/null
+++ ql/src/test/queries/clientnegative/authorization_sba_create_db_with_loc.q
@@ -0,0 +1,14 @@
+set hive.security.authorization.enabled=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider;
+
+-- try to create database where we don't have write permissions and this fails
+-- In logs, it generates exception like below.
+
+-- org.apache.hadoop.hive.ql.metadata.HiveException: java.security.AccessControlException:
+-- Permission denied: user=hive_test_user,
+-- path="file:/Users/.../hive/itests/qtest/target/tmp/databases_no_write_permissions":schaurasia:staff:dr-xr-xr-x
+
+dfs -mkdir ${system:test.tmp.dir}/databases_no_write_permissions;
+dfs -chmod 555 ${system:test.tmp.dir}/databases_no_write_permissions;
+
+create database d1 location '${system:test.tmp.dir}/databases_no_write_permissions/d1';
\ No newline at end of file
diff --git ql/src/test/results/clientnegative/authorization_sba_create_db_with_loc.q.out ql/src/test/results/clientnegative/authorization_sba_create_db_with_loc.q.out
new file mode 100644
index 0000000000..0b8182a69b
--- /dev/null
+++ ql/src/test/results/clientnegative/authorization_sba_create_db_with_loc.q.out
@@ -0,0 +1 @@
+#### A masked pattern was here ####
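
The change is a handoff through the session configuration: the V1 authorizeOperation call only passes privilege arrays to the provider, not the output entities, so CommandAuthorizerV1 publishes the custom CREATE DATABASE location under Constants.WRITE_ENTITY_PATH and StorageBasedAuthorizationProvider substitutes that path for the warehouse root before its permission check. Below is a minimal standalone sketch of that pattern, assuming only hadoop-common on the classpath; the class WriteEntityPathHandoffSketch and its methods are illustrative stand-ins rather than Hive code, and the FileSystem.access check is a simplification of the provider's real permission logic.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;

// Illustrative sketch only: mirrors the WRITE_ENTITY_PATH handoff added by the
// patch; none of these classes or methods are Hive APIs.
public class WriteEntityPathHandoffSketch {

  // Same key the patch adds to Constants.java.
  private static final String WRITE_ENTITY_PATH = "WRITE_ENTITY_PATH";

  // "Command" side: publish the requested location, run the storage check,
  // then unset the key so it cannot leak into later queries on the session.
  static void authorizeCreateDatabase(Configuration sessionConf, String customLocation,
      Path warehouseRoot) throws Exception {
    if (customLocation != null) {
      sessionConf.set(WRITE_ENTITY_PATH, customLocation);
    }
    try {
      checkWriteAccess(sessionConf, warehouseRoot);
    } finally {
      sessionConf.unset(WRITE_ENTITY_PATH);
    }
  }

  // "Storage" side: authorize against the published path when present,
  // otherwise fall back to the warehouse root, as the patched provider does.
  static void checkWriteAccess(Configuration conf, Path warehouseRoot) throws Exception {
    Path target = warehouseRoot;
    if (conf.get(WRITE_ENTITY_PATH) != null) {
      target = new Path(conf.get(WRITE_ENTITY_PATH));
    }
    FileSystem fs = target.getFileSystem(conf);
    // The location itself may not exist yet (the q-file creates d1 under a
    // read-only directory), so walk up to the deepest existing ancestor and
    // require write access there; access() throws AccessControlException on
    // failure, which is what the negative test expects to surface.
    Path existing = target;
    while (existing != null && !fs.exists(existing)) {
      existing = existing.getParent();
    }
    if (existing != null) {
      fs.access(existing, FsAction.WRITE);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical local paths; a chmod 555 parent directory here would make
    // the call fail the same way the clientnegative test does.
    authorizeCreateDatabase(conf, "file:///tmp/databases_no_write_permissions/d1",
        new Path("file:///tmp/warehouse"));
  }
}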