diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java index 68a25e0..a82ef8b 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java @@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; @@ -31,6 +32,7 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.FunctionUtils; import org.apache.hadoop.hive.ql.exec.TaskFactory; +import org.apache.hadoop.hive.ql.hooks.WriteEntity; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc; @@ -80,6 +82,8 @@ private void analyzeCreateFunction(ASTNode ast) throws SemanticException { CreateFunctionDesc desc = new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources); rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf)); + + addEntities(functionName, isTemporaryFunction); } private void analyzeDropFunction(ASTNode ast) throws SemanticException { @@ -103,6 +107,8 @@ private void analyzeDropFunction(ASTNode ast) throws SemanticException { boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null); DropFunctionDesc desc = new DropFunctionDesc(functionName, isTemporaryFunction); rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf)); + + addEntities(functionName, isTemporaryFunction); } private ResourceType getResourceType(ASTNode token) throws SemanticException { @@ -144,4 +150,25 @@ 
private ResourceType getResourceType(ASTNode token) throws SemanticException { return resources; } + + /** + * Add write entities to the semantic analyzer to restrict function creation to privileged users. + */ + private void addEntities(String functionName, boolean isTemporaryFunction) + throws SemanticException { + Database database = null; + if (!isTemporaryFunction) { + try { + String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName); + String dbName = qualifiedNameParts[0]; + database = getDatabase(dbName); + } catch (HiveException e) { + LOG.error(e); + throw new SemanticException(e); + } + } + if (database != null) { + outputs.add(new WriteEntity(database)); + } + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java index 7dfd574..e1186f2 100644 --- ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java +++ ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java @@ -169,10 +169,10 @@ op2Priv.put(HiveOperationType.SHOWINDEXES, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.SHOWPARTITIONS, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.SHOWLOCKS, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.CREATEFUNCTION, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.DROPFUNCTION, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.CREATEMACRO, new InOutPrivs(null, null)); - op2Priv.put(HiveOperationType.DROPMACRO, new InOutPrivs(null, null)); + op2Priv.put(HiveOperationType.CREATEFUNCTION, new InOutPrivs(null, ADMIN_PRIV_AR)); + op2Priv.put(HiveOperationType.DROPFUNCTION, new InOutPrivs(null, ADMIN_PRIV_AR)); + op2Priv.put(HiveOperationType.CREATEMACRO, new InOutPrivs(null, ADMIN_PRIV_AR)); + op2Priv.put(HiveOperationType.DROPMACRO, 
new InOutPrivs(null, ADMIN_PRIV_AR)); op2Priv.put(HiveOperationType.LOCKTABLE, new InOutPrivs(null, null)); op2Priv.put(HiveOperationType.UNLOCKTABLE, new InOutPrivs(null, null)); diff --git ql/src/test/queries/clientnegative/authorization_create_func1.q ql/src/test/queries/clientnegative/authorization_create_func1.q new file mode 100644 index 0000000..1a974ca --- /dev/null +++ ql/src/test/queries/clientnegative/authorization_create_func1.q @@ -0,0 +1,7 @@ +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=hive_test_user; + +-- permanent function creation should fail for non-admin roles +create function perm_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii'; diff --git ql/src/test/queries/clientpositive/authorization_create_func1.q ql/src/test/queries/clientpositive/authorization_create_func1.q new file mode 100644 index 0000000..47ec439 --- /dev/null +++ ql/src/test/queries/clientpositive/authorization_create_func1.q @@ -0,0 +1,14 @@ +set hive.users.in.admin.role=hive_admin_user; +set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory; +set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator; +set hive.security.authorization.enabled=true; +set user.name=hive_admin_user; + +-- admin required for create function +set role ADMIN; + +create temporary function temp_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii'; +create function perm_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii'; + +drop temporary function temp_fn; +drop function perm_fn; diff --git ql/src/test/results/clientnegative/authorization_create_func1.q.out ql/src/test/results/clientnegative/authorization_create_func1.q.out new 
file mode 100644 index 0000000..7c72092 --- /dev/null +++ ql/src/test/results/clientnegative/authorization_create_func1.q.out @@ -0,0 +1 @@ +FAILED: HiveAccessControlException Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=DATABASE, name=default] : [ADMIN PRIVILEGE] diff --git ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out index 393a3e8..fcd5ce7 100644 --- ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out +++ ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out @@ -1,4 +1,5 @@ PREHOOK: query: create function default.badfunc as 'my.nonexistent.class' PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default FAILED: Class my.nonexistent.class not found FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask diff --git ql/src/test/results/clientnegative/create_function_nonexistent_db.q.out ql/src/test/results/clientnegative/create_function_nonexistent_db.q.out index ebb069e..f582d41 100644 --- ql/src/test/results/clientnegative/create_function_nonexistent_db.q.out +++ ql/src/test/results/clientnegative/create_function_nonexistent_db.q.out @@ -1,3 +1 @@ -PREHOOK: query: create function nonexistentdb.badfunc as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper' -PREHOOK: type: CREATEFUNCTION -FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. 
NoSuchObjectException(message:There is no database named nonexistentdb) +FAILED: SemanticException org.apache.hadoop.hive.ql.parse.SemanticException: Database does not exist: nonexistentdb diff --git ql/src/test/results/clientnegative/create_function_nonudf_class.q.out ql/src/test/results/clientnegative/create_function_nonudf_class.q.out index dd66afc..26565be 100644 --- ql/src/test/results/clientnegative/create_function_nonudf_class.q.out +++ ql/src/test/results/clientnegative/create_function_nonudf_class.q.out @@ -1,4 +1,5 @@ PREHOOK: query: create function default.badfunc as 'java.lang.String' PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default FAILED: Class java.lang.String does not implement UDF, GenericUDF, or UDAF FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask diff --git ql/src/test/results/clientnegative/udf_local_resource.q.out ql/src/test/results/clientnegative/udf_local_resource.q.out index b6ea77d..9e6b09b 100644 --- ql/src/test/results/clientnegative/udf_local_resource.q.out +++ ql/src/test/results/clientnegative/udf_local_resource.q.out @@ -1,3 +1,4 @@ PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFileLookup' using file '../../data/files/sales.txt' PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Hive warehouse is non-local, but ../../data/files/sales.txt specifies file on local filesystem. 
Resources on non-local warehouse should specify a non-local scheme/path diff --git ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out index ad70d54..06a49e4 100644 --- ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out +++ ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out @@ -1,4 +1,5 @@ PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFileLookup' using file 'nonexistent_file.txt' PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default nonexistent_file.txt does not exist FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Unable to load FILE nonexistent_file.txt diff --git ql/src/test/results/clientpositive/authorization_create_func1.q.out ql/src/test/results/clientpositive/authorization_create_func1.q.out new file mode 100644 index 0000000..e09e30f --- /dev/null +++ ql/src/test/results/clientpositive/authorization_create_func1.q.out @@ -0,0 +1,26 @@ +PREHOOK: query: -- admin required for create function +set role ADMIN +PREHOOK: type: SHOW_ROLES +POSTHOOK: query: -- admin required for create function +set role ADMIN +POSTHOOK: type: SHOW_ROLES +PREHOOK: query: create temporary function temp_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii' +PREHOOK: type: CREATEFUNCTION +POSTHOOK: query: create temporary function temp_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii' +POSTHOOK: type: CREATEFUNCTION +PREHOOK: query: create function perm_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii' +PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default +POSTHOOK: query: create function perm_fn as 'org.apache.hadoop.hive.ql.udf.UDFAscii' +POSTHOOK: type: CREATEFUNCTION +POSTHOOK: Output: database:default +PREHOOK: query: drop temporary function temp_fn +PREHOOK: type: DROPFUNCTION +POSTHOOK: query: drop temporary function temp_fn +POSTHOOK: type: DROPFUNCTION +PREHOOK: query: drop function 
perm_fn +PREHOOK: type: DROPFUNCTION +PREHOOK: Output: database:default +POSTHOOK: query: drop function perm_fn +POSTHOOK: type: DROPFUNCTION +POSTHOOK: Output: database:default diff --git ql/src/test/results/clientpositive/create_func1.q.out ql/src/test/results/clientpositive/create_func1.q.out index 5a249c3..62ca263 100644 --- ql/src/test/results/clientpositive/create_func1.q.out +++ ql/src/test/results/clientpositive/create_func1.q.out @@ -15,8 +15,10 @@ POSTHOOK: query: create database mydb POSTHOOK: type: CREATEDATABASE PREHOOK: query: create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper' PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:mydb POSTHOOK: query: create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper' POSTHOOK: type: CREATEFUNCTION +POSTHOOK: Output: database:mydb PREHOOK: query: show functions mydb.func1 PREHOOK: type: SHOWFUNCTIONS POSTHOOK: query: show functions mydb.func1 @@ -33,8 +35,10 @@ POSTHOOK: Input: default@src ABC PREHOOK: query: drop function mydb.func1 PREHOOK: type: DROPFUNCTION +PREHOOK: Output: database:mydb POSTHOOK: query: drop function mydb.func1 POSTHOOK: type: DROPFUNCTION +POSTHOOK: Output: database:mydb PREHOOK: query: -- function should now be gone show functions mydb.func1 PREHOOK: type: SHOWFUNCTIONS @@ -44,9 +48,11 @@ POSTHOOK: type: SHOWFUNCTIONS PREHOOK: query: -- To test function name resolution create function mydb.qtest_get_java_boolean as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper' PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:mydb POSTHOOK: query: -- To test function name resolution create function mydb.qtest_get_java_boolean as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper' POSTHOOK: type: CREATEFUNCTION +POSTHOOK: Output: database:mydb PREHOOK: query: use default PREHOOK: type: SWITCHDATABASE POSTHOOK: query: use default @@ -79,8 +85,10 @@ POSTHOOK: Input: default@src ABC NULL ABC PREHOOK: query: drop function 
mydb.qtest_get_java_boolean PREHOOK: type: DROPFUNCTION +PREHOOK: Output: database:mydb POSTHOOK: query: drop function mydb.qtest_get_java_boolean POSTHOOK: type: DROPFUNCTION +POSTHOOK: Output: database:mydb PREHOOK: query: drop database mydb cascade PREHOOK: type: DROPDATABASE PREHOOK: Input: database:mydb diff --git ql/src/test/results/clientpositive/udf_using.q.out ql/src/test/results/clientpositive/udf_using.q.out index 69e5f3b..6cb9a6c 100644 --- ql/src/test/results/clientpositive/udf_using.q.out +++ ql/src/test/results/clientpositive/udf_using.q.out @@ -1,7 +1,9 @@ #### A masked pattern was here #### PREHOOK: type: CREATEFUNCTION +PREHOOK: Output: database:default #### A masked pattern was here #### POSTHOOK: type: CREATEFUNCTION +POSTHOOK: Output: database:default PREHOOK: query: create table udf_using (c1 string) PREHOOK: type: CREATETABLE POSTHOOK: query: create table udf_using (c1 string) @@ -38,7 +40,9 @@ POSTHOOK: Output: default@udf_using POSTHOOK: Lineage: udf_using.c1 SIMPLE [] PREHOOK: query: drop function lookup PREHOOK: type: DROPFUNCTION +PREHOOK: Output: database:default POSTHOOK: query: drop function lookup POSTHOOK: type: DROPFUNCTION +POSTHOOK: Output: database:default POSTHOOK: Lineage: udf_using.c1 SIMPLE [] #### A masked pattern was here ####