diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 53d88b0..f81d2ba 100644
--- itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -33,10 +33,13 @@
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
@@ -52,10 +55,12 @@
  * Test HiveAuthorizer api invocation
  */
 public class TestHiveAuthorizerCheckInvocation {
+  private final Log LOG = LogFactory.getLog(this.getClass().getName());
   protected static HiveConf conf;
   protected static Driver driver;
   private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Table";
+  private static final String acidTableName = tableName + "_acid";
   private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Db";
   static HiveAuthorizer mockedAuthorizer;
@@ -82,14 +87,18 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
     conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
     conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+    conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
+    conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
     SessionState.start(conf);
     driver = new Driver(conf);
     runCmd("create table " + tableName
         + " (i int, j int, k string) partitioned by (city string, date string) ");
     runCmd("create database " + dbName);
+    // Need a separate table for ACID testing since it has to be bucketed and it has to be Acid
+    runCmd("create table " + acidTableName + " (i int, j int) clustered by (i) into 2 buckets " +
+        "stored as orc");
   }
 
   private static void runCmd(String cmd) throws CommandNeedRetryException {
@@ -99,6 +108,10 @@ private static void runCmd(String cmd) throws CommandNeedRetryException {
 
   @AfterClass
   public static void afterTests() throws Exception {
+    // Drop the tables when we're done.  This makes the test work inside an IDE.
+    runCmd("drop table if exists " + acidTableName);
+    runCmd("drop table if exists " + tableName);
+    runCmd("drop database if exists " + dbName);
     driver.close();
   }
 
@@ -244,6 +257,41 @@ public void testTempFunction() throws HiveAuthzPluginException, HiveAccessContro
     assertEquals("db name", null, funcObj.getDbname());
   }
 
+  @Test
+  public void testUpdateSomeColumnsUsed() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("update " + acidTableName + " set i = 5 where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> outputs = io.getRight();
+    HivePrivilegeObject tableObj = outputs.get(0);
+    LOG.debug("Got privilege object " + tableObj);
+    assertEquals("no of columns used", 1, tableObj.getColumns().size());
+    assertEquals("Column used", "i", tableObj.getColumns().get(0));
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    tableObj = inputs.get(0);
+    assertEquals(1, tableObj.getColumns().size());
+    assertEquals("j", tableObj.getColumns().get(0));
+  }
+
+  @Test
+  public void testDelete() throws HiveAuthzPluginException,
+      HiveAccessControlException, CommandNeedRetryException {
+    reset(mockedAuthorizer);
+    int status = driver.compile("delete from " + acidTableName + " where j = 3");
+    assertEquals(0, status);
+
+    Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectInputs();
+    List<HivePrivilegeObject> inputs = io.getLeft();
+    assertEquals(1, inputs.size());
+    HivePrivilegeObject tableObj = inputs.get(0);
+    assertEquals(1, tableObj.getColumns().size());
+    assertEquals("j", tableObj.getColumns().get(0));
+  }
+
   private void checkSingleTableInput(List<HivePrivilegeObject> inputs) {
     assertEquals("number of inputs", 1, inputs.size());
diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 298f429..5e46590 100644
--- ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -503,9 +503,11 @@ public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
       // get mapping of tables to columns used
       ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
       // colAccessInfo is set only in case of SemanticAnalyzer
-      Map<String, List<String>> tab2Cols = colAccessInfo != null ? colAccessInfo
-          .getTableToColumnAccessMap() : null;
-      doAuthorizationV2(ss, op, inputs, outputs, command, tab2Cols);
+      Map<String, List<String>> selectTab2Cols = colAccessInfo != null ? colAccessInfo
+          .getTableToColumnAccessMap() : null;
+      Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ?
+          sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
+      doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
       return;
     }
     if (op == null) {
@@ -696,7 +698,13 @@ private static void getTablePartitionUsedColumns(HiveOperation op, BaseSemanticA
   }
 
   private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
-      HashSet<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols) throws HiveException {
+      HashSet<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
+      Map<String, List<String>> updateTab2Cols) throws HiveException {
+
+    /* updateTab2Cols needs to be kept separate from tab2cols: if tab2cols were also passed to
+       getHivePrivObjects for the output case, it would trip up insert/selects, since the
+       insert would be checked against the columns gathered from the select.
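+       (For example, in "insert into t1 select a from t2", the t1 output must not be
+       checked against t2's selected column "a".)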
+     */
 
     HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
     authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
@@ -704,7 +712,7 @@ private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet
     HiveOperationType hiveOpType = getHiveOperationType(op);
     List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs, tab2cols);
-    List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs, null);
+    List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs, updateTab2Cols);
 
     ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs,
         authzContextBuilder.build());
   }
@@ -730,12 +738,6 @@ private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet
         //do not authorize temporary uris
         continue;
       }
-      if (privObject instanceof ReadEntity && ((ReadEntity)privObject).isUpdateOrDelete()) {
-        // Skip this one, as we don't want to check select privileges for the table we're reading
-        // for an update or delete.
-        continue;
-      }
-
       //support for authorization on partitions needs to be added
       String dbname = null;
       String objName = null;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index b2f66e0..4ff9678 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -115,6 +115,10 @@
   protected LineageInfo linfo;
   protected TableAccessInfo tableAccessInfo;
   protected ColumnAccessInfo columnAccessInfo;
+  /**
+   * Columns accessed by updates
+   */
+  protected ColumnAccessInfo updateColumnAccessInfo;
 
 
   public boolean skipAuthorization() {
@@ -866,6 +870,14 @@ public void setColumnAccessInfo(ColumnAccessInfo columnAccessInfo) {
     this.columnAccessInfo = columnAccessInfo;
   }
 
+  public ColumnAccessInfo getUpdateColumnAccessInfo() {
+    return updateColumnAccessInfo;
+  }
+
+  public void setUpdateColumnAccessInfo(ColumnAccessInfo updateColumnAccessInfo) {
+    this.updateColumnAccessInfo = updateColumnAccessInfo;
+  }
+
   protected LinkedHashMap<String, String> extractPartitionSpecs(Tree partspec)
       throws SemanticException {
     LinkedHashMap<String, String> partSpec = new LinkedHashMap<String, String>();
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
index a4df8b4..8416cff 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessInfo.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -54,4 +56,21 @@ public void add(String table, String col) {
     }
     return mapping;
   }
+
+  /**
+   * Strip a virtual column out of the set of columns.  This is useful in cases where we do not
+   * want to be checking against the user reading virtual columns, namely update and delete.
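+   * (For example, the update/delete rewrite selects ROW__ID, which the user should not need
+   * SELECT privileges on.)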
+   * @param vc the virtual column to strip
+   */
+  public void stripVirtualColumn(VirtualColumn vc) {
+    for (Map.Entry<String, Set<String>> e : tableToColumnAccessMap.entrySet()) {
+      for (String columnName : e.getValue()) {
+        if (vc.getName().equalsIgnoreCase(columnName)) {
+          e.getValue().remove(columnName);
+          break;
+        }
+      }
+    }
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index 3aaa09c..baff311 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 
@@ -148,6 +149,7 @@ private void reparseAndSuperAnalyze(ASTNode tree) throws SemanticException {
     rewrittenQueryStr.append(" select ROW__ID");
 
     Map<Integer, ASTNode> setColExprs = null;
+    Map<String, ASTNode> setCols = null;
     if (updating()) {
       // An update needs to select all of the columns, as we rewrite the entire row.  Also,
       // we need to figure out which columns we are going to replace.  We won't write the set
@@ -160,7 +162,7 @@
       // Get the children of the set clause, each of which should be a column assignment
       List<? extends Node> assignments = setClause.getChildren();
-      Map<String, ASTNode> setCols = new HashMap<String, ASTNode>(assignments.size());
+      setCols = new HashMap<String, ASTNode>(assignments.size());
       setColExprs = new HashMap<Integer, ASTNode>(assignments.size());
       for (Node a : assignments) {
         ASTNode assignment = (ASTNode)a;
@@ -323,6 +325,22 @@
             WriteEntity.WriteType.UPDATE);
       }
     }
+
+    // For updates, we need to set the column access info so that it contains information on
+    // the columns we are updating.
+    if (updating()) {
+      ColumnAccessInfo cai = new ColumnAccessInfo();
+      for (String colName : setCols.keySet()) {
+        cai.add(Table.getCompleteName(mTable.getDbName(), mTable.getTableName()), colName);
+      }
+      setUpdateColumnAccessInfo(cai);
+    }
+
+    // We need to weed ROW__ID out of the input column info, as it doesn't make any sense to
+    // require the user to have authorization on that column.
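+    // (ROW__ID appears in the select list only because the rewrite above added it, not
+    // because the user asked for it.)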
+    if (columnAccessInfo != null) {
+      columnAccessInfo.stripVirtualColumn(VirtualColumn.ROWID);
+    }
   }
 
   private String operation() {
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
index 93df9f4..1e1f3da 100644
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
@@ -310,9 +310,12 @@ public static HivePrivObjectActionType getActionType(Entity privObject) {
       case INSERT:
         return HivePrivObjectActionType.INSERT;
       case INSERT_OVERWRITE:
         return HivePrivObjectActionType.INSERT_OVERWRITE;
+      case UPDATE:
+        return HivePrivObjectActionType.UPDATE;
+      case DELETE:
+        return HivePrivObjectActionType.DELETE;
       default:
-        // Ignore other types for purposes of authorization, we are interested only
-        // in INSERT vs INSERT_OVERWRITE as of now
+        // Ignore other types for purposes of authorization
         break;
       }
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
index 093b4fd..01d9cb6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
@@ -81,7 +81,7 @@ private int compare(Collection<String> o1, Collection<String> o2) {
     GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, COMMAND_PARAMS, FUNCTION
   } ;
 
   public enum HivePrivObjectActionType {
-    OTHER, INSERT, INSERT_OVERWRITE
+    OTHER, INSERT, INSERT_OVERWRITE, UPDATE, DELETE
   };
   private final HivePrivilegeObjectType type;
diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index 3236341..d43eee4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -118,6 +118,7 @@ public HivePrivilegeObjectType getObjectType() {
   private static SQLPrivTypeGrant[] ADMIN_PRIV_AR = arr(SQLPrivTypeGrant.ADMIN_PRIV);
   private static SQLPrivTypeGrant[] INS_NOGRANT_AR = arr(SQLPrivTypeGrant.INSERT_NOGRANT);
   private static SQLPrivTypeGrant[] DEL_NOGRANT_AR = arr(SQLPrivTypeGrant.DELETE_NOGRANT);
+  private static SQLPrivTypeGrant[] UPD_NOGRANT_AR = arr(SQLPrivTypeGrant.UPDATE_NOGRANT);
   private static SQLPrivTypeGrant[] OWNER_INS_SEL_DEL_NOGRANT_AR =
       arr(SQLPrivTypeGrant.OWNER_PRIV,
           SQLPrivTypeGrant.INSERT_NOGRANT,
@@ -287,8 +288,14 @@ public HivePrivilegeObjectType getObjectType() {
 
     op2Priv.put(HiveOperationType.QUERY,
         arr(
             new PrivRequirement(SEL_NOGRANT_AR, IOType.INPUT),
-            new PrivRequirement(INS_NOGRANT_AR, IOType.OUTPUT, null),
-            new PrivRequirement(DEL_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.INSERT_OVERWRITE)
+            new PrivRequirement(INS_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.INSERT),
+            new PrivRequirement(
+                arr(SQLPrivTypeGrant.INSERT_NOGRANT, SQLPrivTypeGrant.DELETE_NOGRANT),
+                IOType.OUTPUT,
+                HivePrivObjectActionType.INSERT_OVERWRITE),
+            new PrivRequirement(DEL_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.DELETE),
+            new PrivRequirement(UPD_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.UPDATE),
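+            // OTHER is the default action type; outputs with no specific action still
+            // require INSERT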
+            new PrivRequirement(INS_NOGRANT_AR, IOType.OUTPUT, HivePrivObjectActionType.OTHER)
             )
         );
diff --git ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
new file mode 100644
index 0000000..090495a
--- /dev/null
+++ ql/src/test/queries/clientnegative/authorization_delete_nodeletepriv.q
@@ -0,0 +1,17 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.enforce.bucketing=true;
+
+
+-- check delete without delete priv
+create table auth_nodel(i int) clustered by (i) into 2 buckets stored as orc;
+
+set user.name=user1;
+delete from auth_nodel where i > 0;
+
diff --git ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
new file mode 100644
index 0000000..922beba
--- /dev/null
+++ ql/src/test/queries/clientnegative/authorization_update_noupdatepriv.q
@@ -0,0 +1,17 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.enforce.bucketing=true;
+
+
+-- check update without update priv
+create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc;
+
+set user.name=user1;
+update auth_noupd set i = 0 where i > 0;
+
diff --git ql/src/test/queries/clientpositive/authorization_delete.q ql/src/test/queries/clientpositive/authorization_delete.q
new file mode 100644
index 0000000..ebd0315
--- /dev/null
+++ ql/src/test/queries/clientpositive/authorization_delete.q
@@ -0,0 +1,25 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.enforce.bucketing=true;
+
+set user.name=user1;
+-- current user has been set (comment line before the set cmd results in a parse error!!)
+
+CREATE TABLE t_auth_del(i int) clustered by (i) into 2 buckets stored as orc;
+
+-- grant delete privilege to another user
+GRANT DELETE ON t_auth_del TO USER userWIns;
+GRANT SELECT ON t_auth_del TO USER userWIns;
+
+set user.name=hive_admin_user;
+set role admin;
+SHOW GRANT ON TABLE t_auth_del;
+
+
+set user.name=userWIns;
+delete from t_auth_del where i > 0;
diff --git ql/src/test/queries/clientpositive/authorization_delete_own_table.q ql/src/test/queries/clientpositive/authorization_delete_own_table.q
new file mode 100644
index 0000000..19dbbeb
--- /dev/null
+++ ql/src/test/queries/clientpositive/authorization_delete_own_table.q
@@ -0,0 +1,17 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.enforce.bucketing=true;
+
+
+set user.name=user1;
+create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc;
+delete from auth_noupd where i > 0;
+
+set user.name=hive_admin_user;
+set role admin;
diff --git ql/src/test/queries/clientpositive/authorization_update.q ql/src/test/queries/clientpositive/authorization_update.q
new file mode 100644
index 0000000..18ceadb
--- /dev/null
+++ ql/src/test/queries/clientpositive/authorization_update.q
@@ -0,0 +1,28 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.enforce.bucketing=true;
+
+set user.name=user1;
+-- current user has been set (comment line before the set cmd results in a parse error!!)
+
+CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc;
+
+CREATE TABLE t_select(i int);
+GRANT ALL ON TABLE t_select TO ROLE public;
+
+-- grant update privilege to another user
+GRANT UPDATE ON t_auth_up TO USER userWIns;
+GRANT SELECT ON t_auth_up TO USER userWIns;
+
+set user.name=hive_admin_user;
+set role admin;
+SHOW GRANT ON TABLE t_auth_up;
+
+
+set user.name=userWIns;
+update t_auth_up set i = 0 where i > 0;
diff --git ql/src/test/queries/clientpositive/authorization_update_own_table.q ql/src/test/queries/clientpositive/authorization_update_own_table.q
new file mode 100644
index 0000000..46beb49
--- /dev/null
+++ ql/src/test/queries/clientpositive/authorization_update_own_table.q
@@ -0,0 +1,17 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.enforce.bucketing=true;
+
+
+set user.name=user1;
+create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc;
+update auth_noupd set i = 0 where i > 0;
+
+set user.name=hive_admin_user;
+set role admin;
diff --git ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out
new file mode 100644
index 0000000..00fb3f7
--- /dev/null
+++ ql/src/test/results/clientnegative/authorization_delete_nodeletepriv.q.out
@@ -0,0 +1,11 @@
+PREHOOK: query: -- check delete without delete priv
+create table auth_nodel(i int) clustered by (i) into 2 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@auth_nodel
+POSTHOOK: query: -- check delete without delete priv
+create table auth_nodel(i int) clustered by (i) into 2 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@auth_nodel
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation QUERY [[DELETE] on Object [type=TABLE_OR_VIEW, name=default.auth_nodel], [SELECT] on Object [type=TABLE_OR_VIEW, name=default.auth_nodel]]
diff --git ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
new file mode 100644
index 0000000..e4b3c5e
--- /dev/null
+++ ql/src/test/results/clientnegative/authorization_update_noupdatepriv.q.out
@@ -0,0 +1,11 @@
+PREHOOK: query: -- check update without update priv
+create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@auth_noupd
+POSTHOOK: query: -- check update without update priv
+create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@auth_noupd
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation QUERY [[SELECT] on Object [type=TABLE_OR_VIEW, name=default.auth_noupd], [UPDATE] on Object [type=TABLE_OR_VIEW, name=default.auth_noupd]]
diff --git ql/src/test/results/clientpositive/authorization_delete.q.out ql/src/test/results/clientpositive/authorization_delete.q.out
new file mode 100644
index 0000000..9aa4600
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_delete.q.out
@@ -0,0 +1,48 @@
+PREHOOK: query: -- current user has been set (comment line before the set cmd results in a parse error!!)
+
+CREATE TABLE t_auth_del(i int) clustered by (i) into 2 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t_auth_del
+POSTHOOK: query: -- current user has been set (comment line before the set cmd results in a parse error!!)
+
+CREATE TABLE t_auth_del(i int) clustered by (i) into 2 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t_auth_del
+PREHOOK: query: -- grant delete privilege to another user
+GRANT DELETE ON t_auth_del TO USER userWIns
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@t_auth_del
+POSTHOOK: query: -- grant delete privilege to another user
+GRANT DELETE ON t_auth_del TO USER userWIns
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@t_auth_del
+PREHOOK: query: GRANT SELECT ON t_auth_del TO USER userWIns
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@t_auth_del
+POSTHOOK: query: GRANT SELECT ON t_auth_del TO USER userWIns
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@t_auth_del
+PREHOOK: query: set role admin
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role admin
+POSTHOOK: type: SHOW_ROLES
+PREHOOK: query: SHOW GRANT ON TABLE t_auth_del
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: SHOW GRANT ON TABLE t_auth_del
+POSTHOOK: type: SHOW_GRANT
+default	t_auth_del	user1	USER	DELETE	true	-1	user1
+default	t_auth_del	user1	USER	INSERT	true	-1	user1
+default	t_auth_del	user1	USER	SELECT	true	-1	user1
+default	t_auth_del	user1	USER	UPDATE	true	-1	user1
+default	t_auth_del	userWIns	USER	DELETE	false	-1	user1
+default	t_auth_del	userWIns	USER	SELECT	false	-1	user1
+PREHOOK: query: delete from t_auth_del where i > 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_auth_del
+PREHOOK: Output: default@t_auth_del
+POSTHOOK: query: delete from t_auth_del where i > 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_auth_del
+POSTHOOK: Output: default@t_auth_del
diff --git ql/src/test/results/clientpositive/authorization_delete_own_table.q.out ql/src/test/results/clientpositive/authorization_delete_own_table.q.out
new file mode 100644
index 0000000..1e0f9c8
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_delete_own_table.q.out
@@ -0,0 +1,20 @@
+PREHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@auth_noupd
+POSTHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@auth_noupd
+PREHOOK: query: delete from auth_noupd where i > 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@auth_noupd
+PREHOOK: Output: default@auth_noupd
+POSTHOOK: query: delete from auth_noupd where i > 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@auth_noupd
+POSTHOOK: Output: default@auth_noupd
+PREHOOK: query: set role admin
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role admin
+POSTHOOK: type: SHOW_ROLES
diff --git ql/src/test/results/clientpositive/authorization_update.q.out ql/src/test/results/clientpositive/authorization_update.q.out
new file mode 100644
index 0000000..019d363
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_update.q.out
@@ -0,0 +1,62 @@
+PREHOOK: query: -- current user has been set (comment line before the set cmd results in a parse error!!)
+
+CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t_auth_up
+POSTHOOK: query: -- current user has been set (comment line before the set cmd results in a parse error!!)
+
+CREATE TABLE t_auth_up(i int) clustered by (i) into 2 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t_auth_up
+PREHOOK: query: CREATE TABLE t_select(i int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@t_select
+POSTHOOK: query: CREATE TABLE t_select(i int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@t_select
+PREHOOK: query: GRANT ALL ON TABLE t_select TO ROLE public
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@t_select
+POSTHOOK: query: GRANT ALL ON TABLE t_select TO ROLE public
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@t_select
+PREHOOK: query: -- grant update privilege to another user
+GRANT UPDATE ON t_auth_up TO USER userWIns
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@t_auth_up
+POSTHOOK: query: -- grant update privilege to another user
+GRANT UPDATE ON t_auth_up TO USER userWIns
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@t_auth_up
+PREHOOK: query: GRANT SELECT ON t_auth_up TO USER userWIns
+PREHOOK: type: GRANT_PRIVILEGE
+PREHOOK: Output: default@t_auth_up
+POSTHOOK: query: GRANT SELECT ON t_auth_up TO USER userWIns
+POSTHOOK: type: GRANT_PRIVILEGE
+POSTHOOK: Output: default@t_auth_up
+PREHOOK: query: set role admin
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role admin
+POSTHOOK: type: SHOW_ROLES
+PREHOOK: query: SHOW GRANT ON TABLE t_auth_up
+PREHOOK: type: SHOW_GRANT
+POSTHOOK: query: SHOW GRANT ON TABLE t_auth_up
+POSTHOOK: type: SHOW_GRANT
+default	t_auth_up	user1	USER	DELETE	true	-1	user1
+default	t_auth_up	user1	USER	INSERT	true	-1	user1
+default	t_auth_up	user1	USER	SELECT	true	-1	user1
+default	t_auth_up	user1	USER	UPDATE	true	-1	user1
+default	t_auth_up	userWIns	USER	SELECT	false	-1	user1
+default	t_auth_up	userWIns	USER	UPDATE	false	-1	user1
+PREHOOK: query: update t_auth_up set i = 0 where i > 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_auth_up
+PREHOOK: Output: default@t_auth_up
+POSTHOOK: query: update t_auth_up set i = 0 where i > 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_auth_up
+POSTHOOK: Output: default@t_auth_up
diff --git ql/src/test/results/clientpositive/authorization_update_own_table.q.out ql/src/test/results/clientpositive/authorization_update_own_table.q.out
new file mode 100644
index 0000000..cbf8f57
--- /dev/null
+++ ql/src/test/results/clientpositive/authorization_update_own_table.q.out
@@ -0,0 +1,20 @@
+PREHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@auth_noupd
+POSTHOOK: query: create table auth_noupd(i int) clustered by (i) into 2 buckets stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@auth_noupd
+PREHOOK: query: update auth_noupd set i = 0 where i > 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@auth_noupd
+PREHOOK: Output: default@auth_noupd
+POSTHOOK: query: update auth_noupd set i = 0 where i > 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@auth_noupd
+POSTHOOK: Output: default@auth_noupd
+PREHOOK: query: set role admin
+PREHOOK: type: SHOW_ROLES
+POSTHOOK: query: set role admin
+POSTHOOK: type: SHOW_ROLES