diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index bc2a34af90..1766bbca32 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -37,6 +37,7 @@
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverFactory;
 import org.apache.hadoop.hive.ql.IDriver;
 import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
@@ -62,7 +63,7 @@ public class TestHiveAuthorizerCheckInvocation {
   private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName());;
   protected static HiveConf conf;
-  protected static IDriver driver;
+  protected static Driver driver;
   private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Table";
   private static final String viewName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
@@ -102,7 +103,7 @@ public static void beforeTest() throws Exception {
     conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
     SessionState.start(conf);
-    driver = DriverFactory.newDriver(conf);
+    driver = new Driver(conf);
     runCmd("create table " + tableName
         + " (i int, j int, k string) partitioned by (city string, `date` string) ");
     runCmd("create view " + viewName + " as select * from " + tableName);
@@ -125,7 +126,7 @@ public static void afterTests() throws Exception {
     runCmd("drop table if exists " + tableName);
     runCmd("drop table if exists " + viewName);
     runCmd("drop table if exists " + fullInTableName);
-    runCmd("drop database if exists " + dbName );
+    runCmd("drop database if exists " + dbName + " CASCADE");
     driver.close();
   }
@@ -269,7 +270,7 @@ public void testPermFunction() throws Exception {

     HivePrivilegeObject funcObj;
     HivePrivilegeObject dbObj;
-    assertEquals("number of output object", 2, outputs.size());
+    assertEquals("number of output objects", 2, outputs.size());
     if(outputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
       funcObj = outputs.get(0);
       dbObj = outputs.get(1);
@@ -284,6 +285,89 @@ public void testPermFunction() throws Exception {
     assertEquals("input type", HivePrivilegeObjectType.DATABASE, dbObj.getType());
     assertTrue("db name", dbName.equalsIgnoreCase(dbObj.getDbname()));
+
+    // actually create the permanent function
+    CommandProcessorResponse cresponse = driver.run(null, true);
+    assertEquals(0, cresponse.getResponseCode());
+
+    // Verify privilege objects
+    reset(mockedAuthorizer);
+    status = driver.compile("select " + dbName + "."
+        + funcName + "() , i from " + tableName);
+    assertEquals(0, status);
+
+    List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
+    assertEquals("number of input objects", 2, inputs.size());
+    HivePrivilegeObject tableObj;
+    if (inputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
+      funcObj = inputs.get(0);
+      tableObj = inputs.get(1);
+    } else {
+      funcObj = inputs.get(1);
+      tableObj = inputs.get(0);
+    }
+
+    assertEquals("input type", HivePrivilegeObjectType.FUNCTION, funcObj.getType());
+    assertEquals("function name", funcName.toLowerCase(), funcObj.getObjectName().toLowerCase());
+    assertEquals("db name", dbName.toLowerCase(), funcObj.getDbname().toLowerCase());
+
+    assertEquals("input type", HivePrivilegeObjectType.TABLE_OR_VIEW, tableObj.getType());
+    assertEquals("table name", tableName.toLowerCase(), tableObj.getObjectName().toLowerCase());
+
+    // create 2nd permanent function
+    String funcName2 = "funcName2";
+    cresponse = driver
+        .run("create function " + dbName + "." + funcName2 + " as 'org.apache.hadoop.hive.ql.udf.UDFRand'");
+    assertEquals(0, cresponse.getResponseCode());
+
+    // try using 2nd permanent function and verify it's only the 2nd one that shows up
+    // for auth
+    reset(mockedAuthorizer);
+    status = driver.compile("select " + dbName + "." + funcName2 + "(i) from " + tableName);
+    assertEquals(0, status);
+
+    inputs = getHivePrivilegeObjectInputs().getLeft();
+    assertEquals("number of input objects", 2, inputs.size());
+    if (inputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
+      funcObj = inputs.get(0);
+      tableObj = inputs.get(1);
+    } else {
+      funcObj = inputs.get(1);
+      tableObj = inputs.get(0);
+    }
+
+    assertEquals("input type", HivePrivilegeObjectType.FUNCTION, funcObj.getType());
+    assertEquals("function name", funcName2.toLowerCase(), funcObj.getObjectName().toLowerCase());
+    assertEquals("db name", dbName.toLowerCase(), funcObj.getDbname().toLowerCase());
+
+    assertEquals("input type", HivePrivilegeObjectType.TABLE_OR_VIEW, tableObj.getType());
+    assertEquals("table name", tableName.toLowerCase(), tableObj.getObjectName().toLowerCase());
+
+    // try using both permanent functions
+    reset(mockedAuthorizer);
+    status = driver.compile(
+        "select " + dbName + "." + funcName2 + "(i), " + dbName + "."
+        + funcName + "(), j from " + tableName);
+    assertEquals(0, status);
+
+    inputs = getHivePrivilegeObjectInputs().getLeft();
+    assertEquals("number of input objects", 3, inputs.size());
+    boolean foundF1 = false;
+    boolean foundF2 = false;
+    boolean foundTable = false;
+    for (HivePrivilegeObject inp : inputs) {
+      if (inp.getType() == HivePrivilegeObjectType.FUNCTION) {
+        if (funcName.equalsIgnoreCase(inp.getObjectName())) {
+          foundF1 = true;
+        } else if (funcName2.equalsIgnoreCase(inp.getObjectName())) {
+          foundF2 = true;
+        }
+      } else if (inp.getType() == HivePrivilegeObjectType.TABLE_OR_VIEW
+          && tableName.equalsIgnoreCase(inp.getObjectName().toLowerCase())) {
+        foundTable = true;
+      }
+    }
+    assertTrue("Found " + funcName, foundF1);
+    assertTrue("Found " + funcName2, foundF2);
+    assertTrue("Found Table", foundTable);
   }

   @Test
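Note (not part of the patch): the assertions above expect each permanent UDF used by the compiled query to reach the authorizer as a FUNCTION-typed HivePrivilegeObject, alongside the TABLE_OR_VIEW input. A minimal sketch of how an authorizer implementation might pick those out, using only the accessors the test itself exercises; the class and method names here are illustrative, not part of Hive:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

    public class FunctionInputSketch {
      // Collect "db.function" names for every permanent UDF among the auth inputs.
      static List<String> permanentUdfNames(List<HivePrivilegeObject> inputs) {
        List<String> names = new ArrayList<>();
        for (HivePrivilegeObject obj : inputs) {
          if (obj.getType() == HivePrivilegeObjectType.FUNCTION) {
            names.add(obj.getDbname() + "." + obj.getObjectName());
          }
        }
        return names;
      }
    }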
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index ed3984efe8..4cc0dff109 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -33,11 +33,13 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.stream.Stream;

 import com.google.common.annotations.VisibleForTesting;
@@ -57,6 +59,7 @@ import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
@@ -66,6 +69,9 @@ import org.apache.hadoop.hive.ql.exec.DagUtils;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -75,6 +81,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.HookContext;
 import org.apache.hadoop.hive.ql.hooks.HookUtils;
 import org.apache.hadoop.hive.ql.hooks.PrivateHookContext;
@@ -599,7 +606,9 @@ public void run() {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARSE);

       hookRunner.runBeforeCompileHook(command);
-
+      // clear CurrentFunctionsInUse set, to capture new set of functions
+      // that SemanticAnalyzer finds are in use
+      SessionState.get().getCurrentFunctionsInUse().clear();
       perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ANALYZE);

       // Flush the metastore cache.  This assures that we don't pick up objects from a previous
@@ -932,12 +941,22 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S
       // get mapping of tables to columns used
       ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
       // colAccessInfo is set only in case of SemanticAnalyzer
-      Map<String, List<String>> selectTab2Cols = colAccessInfo != null ? colAccessInfo
-          .getTableToColumnAccessMap() : null;
-      Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ?
-          sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
-      doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
-      return;
+      Map<String, List<String>> selectTab2Cols = colAccessInfo != null
+          ? colAccessInfo.getTableToColumnAccessMap() : null;
+      Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null
+          ? sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
+
+      // convert to List as above Set was created using Sets.union (for reasons
+      // explained there)
+      // but that Set is immutable
+      List<ReadEntity> inputList = new ArrayList<ReadEntity>(inputs);
+      List<WriteEntity> outputList = new ArrayList<WriteEntity>(outputs);
+
+      // add permanent UDFs being used
+      inputList.addAll(getPermanentFunctionEntities(ss));
+
+      doAuthorizationV2(ss, op, inputList, outputList, command, selectTab2Cols, updateTab2Cols);
+      return;
     }
     if (op == null) {
       throw new HiveException("Operation should not be null");
@@ -1077,6 +1096,29 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S
     }
   }

+  private static List<ReadEntity> getPermanentFunctionEntities(SessionState ss) throws HiveException {
+    List<ReadEntity> functionEntities = new ArrayList<>();
+    for (Entry<String, FunctionInfo> permFunction : ss.getCurrentFunctionsInUse().entrySet()) {
+      if (permFunction.getValue().getFunctionType() != FunctionType.PERSISTENT) {
+        // Only permanent functions need to be authorized.
+        // Built-in function access is allowed to all users.
+        // If user can create a temp function, they should be able to use it
+        // without additional authorization.
+        continue;
+      }
+      functionEntities.add(createReadEntity(permFunction.getKey(), permFunction.getValue()));
+    }
+    return functionEntities;
+  }
+
+  private static ReadEntity createReadEntity(String functionName, FunctionInfo functionInfo)
+      throws HiveException {
+    String[] qualFunctionName = FunctionUtils.getQualifiedFunctionNameParts(functionName);
+    // this is only for the purpose of authorization, only the name matters.
+    Database db = new Database(qualFunctionName[0], "", "", null);
+    return new ReadEntity(db, qualFunctionName[1], functionInfo.getClassName(), Type.FUNCTION);
+  }
+
   private static void getTablePartitionUsedColumns(HiveOperation op, BaseSemanticAnalyzer sem,
       Map<Table, List<String>> tab2Cols, Map<Partition, List<String>> part2Cols,
       Map<String, Boolean> tableUsePartLevelAuth) throws HiveException {
@@ -1131,8 +1173,8 @@ private static void getTablePartitionUsedColumns(HiveOperation op, BaseSemanticA
     }
   }

-  private static void doAuthorizationV2(SessionState ss, HiveOperation op, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
+  private static void doAuthorizationV2(SessionState ss, HiveOperation op, List<ReadEntity> inputs,
+      List<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
       Map<String, List<String>> updateTab2Cols) throws HiveException {

     /* comment for reviewers -> updateTab2Cols needed to be separate from tab2cols because if I
@@ -1153,7 +1195,7 @@ private static void doAuthorizationV2(SessionState ss, HiveOperation op, Set
   private static List<HivePrivilegeObject> getHivePrivObjects(
-      Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
+      List<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
     List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
     if(privObjects == null){
       return hivePrivobjs;
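Note (not part of the patch): getPermanentFunctionEntities() above only turns PERSISTENT functions into ReadEntity authorization inputs; BUILTIN and TEMPORARY entries in the session map are skipped, for the reasons the in-code comment gives. A self-contained sketch of just that filtering rule, assuming a map shaped like SessionState#getCurrentFunctionsInUse(); the class name is illustrative:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.exec.FunctionInfo;
    import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;

    class PermanentFunctionFilterSketch {
      // Keep only permanent (metastore-backed) UDFs; built-ins and temporary
      // functions need no extra authorization.
      static List<String> permanentOnly(Map<String, FunctionInfo> functionsInUse) {
        List<String> qualifiedNames = new ArrayList<>();
        for (Map.Entry<String, FunctionInfo> e : functionsInUse.entrySet()) {
          if (e.getValue().getFunctionType() == FunctionType.PERSISTENT) {
            qualifiedNames.add(e.getKey());
          }
        }
        return qualifiedNames;
      }
    }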
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
index 19328c2a09..09009764cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
@@ -318,7 +318,9 @@ public FunctionInfo getFunctionInfo(String functionName) throws SemanticExceptio
     try {
       functionName = functionName.toLowerCase();
       if (FunctionUtils.isQualifiedFunctionName(functionName)) {
-        return getQualifiedFunctionInfoUnderLock(functionName);
+        FunctionInfo functionInfo = getQualifiedFunctionInfoUnderLock(functionName);
+        addToCurrentFunctions(functionName, functionInfo);
+        return functionInfo;
       }
       // First try without qualifiers - would resolve builtin/temp functions.
       // Otherwise try qualifying with current db name.
@@ -327,17 +329,24 @@ public FunctionInfo getFunctionInfo(String functionName) throws SemanticExceptio
         throw new SemanticException ("UDF " + functionName + " is not allowed");
       }
       if (functionInfo == null) {
-        String qualifiedName = FunctionUtils.qualifyFunctionName(
+        functionName = FunctionUtils.qualifyFunctionName(
            functionName, SessionState.get().getCurrentDatabase().toLowerCase());
-        functionInfo = getQualifiedFunctionInfoUnderLock(qualifiedName);
+        functionInfo = getQualifiedFunctionInfoUnderLock(functionName);
       }
-      return functionInfo;
+      addToCurrentFunctions(functionName, functionInfo);
+      return functionInfo;
     } finally {
       lock.unlock();
     }
   }

+  private void addToCurrentFunctions(String functionName, FunctionInfo functionInfo) {
+    if (SessionState.get() != null && functionInfo != null) {
+      SessionState.get().getCurrentFunctionsInUse().put(functionName, functionInfo);
+    }
+  }
+
   public WindowFunctionInfo getWindowFunctionInfo(String functionName) throws SemanticException {
     FunctionInfo info = getFunctionInfo(WINDOW_FUNC_PREFIX + functionName);
     if (info instanceof WindowFunctionInfo) {
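Note (not part of the patch): the Registry change records every resolved function under its db-qualified name (unqualified names are qualified with the current database before the put), which is what lets Driver#createReadEntity() split the map key back into database and function parts. A small round-trip sketch using the same FunctionUtils helpers the patch calls; the class name and literals are illustrative:

    import org.apache.hadoop.hive.ql.exec.FunctionUtils;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    class FunctionNameRoundTripSketch {
      public static void main(String[] args) throws HiveException {
        // qualify an unqualified name with the (lower-cased) current database
        String qualified = FunctionUtils.qualifyFunctionName("myfunc", "mydb");
        // split it back apart, as Driver#createReadEntity() does
        String[] parts = FunctionUtils.getQualifiedFunctionNameParts(qualified);
        System.out.println(parts[0] + "." + parts[1]); // expected: mydb.myfunc
      }
    }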
    *
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index a55e66b4b6..799ca65880 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -325,6 +325,8 @@ public HivePrivilegeObjectType getObjectType() {
     adminPrivOps.add(HiveOperationType.CREATE_MAPPING);
     adminPrivOps.add(HiveOperationType.ALTER_MAPPING);
     adminPrivOps.add(HiveOperationType.DROP_MAPPING);
+    adminPrivOps.add(HiveOperationType.CREATEFUNCTION);
+    adminPrivOps.add(HiveOperationType.DROPFUNCTION);

     // operations require select priv
     op2Priv.put(HiveOperationType.SHOWCOLUMNS, PrivRequirement.newIOPrivRequirement
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
index 4e456e7f08..a9fae4f913 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
@@ -129,13 +129,17 @@ private void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+  private final Map<String, FunctionInfo> currentFunctionsInUse = new HashMap<>();
+
   /**
    * CURRENT_TIMESTAMP value for query
    */
@@ -1990,6 +1997,11 @@ public KillQuery getKillQuery() {
   public void addCleanupItem(Closeable item) {
     cleanupItems.add(item);
   }
+
+  public Map<String, FunctionInfo> getCurrentFunctionsInUse() {
+    return currentFunctionsInUse;
+  }
+
 }

 class ResourceMaps {
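Note (not part of the patch): with the Operation2Privilege change above, CREATEFUNCTION and DROPFUNCTION join the operations that SQL standard authorization reserves for admin users, while merely using an existing permanent UDF is authorized through the new FUNCTION input objects. An illustrative, self-contained check of that admin-only rule, assuming a plain set like the adminPrivOps set the patch extends; the class and field names are not Hive's:

    import java.util.EnumSet;
    import java.util.Set;

    import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;

    class AdminOnlyOpsSketch {
      // Mirrors the two entries the patch adds to adminPrivOps.
      static final Set<HiveOperationType> FUNCTION_ADMIN_OPS =
          EnumSet.of(HiveOperationType.CREATEFUNCTION, HiveOperationType.DROPFUNCTION);

      static boolean requiresAdmin(HiveOperationType op) {
        return FUNCTION_ADMIN_OPS.contains(op);
      }
    }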