diff --git itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
new file mode 100644
index 0000000..c60f856
--- /dev/null
+++ itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -0,0 +1,73 @@
+package org.apache.hadoop.hive.ql;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Verifies that CREATE FUNCTION exposes the target database, the function
+ * name, and any USING FILE resources as WriteEntity outputs of the plan.
+ */
+public class TestCreateUdfEntities {
+  private Driver driver;
+  private String funcName = "print_test";
+
+  @Before
+  public void setUp() throws Exception {
+    HiveConf conf = new HiveConf(Driver.class);
+    SessionState.start(conf);
+    driver = new Driver(conf);
+    driver.init();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    driver.run("drop function " + funcName);
+    driver.close();
+    SessionState.get().close();
+  }
+
+  @Test
+  public void testUdfWithLocalResource() throws Exception {
+    int rc = driver.compile("CREATE FUNCTION " + funcName
+        + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'"
+        + " USING FILE 'file:///tmp/udf1.jar'");
+    assertEquals(0, rc);
+    WriteEntity[] outputEntities = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
+    assertEquals(3, outputEntities.length);
+
+    assertEquals(Entity.Type.DATABASE, outputEntities[0].getType());
+    assertEquals("default", outputEntities[0].getDatabase().getName());
+
+    assertEquals(Entity.Type.FUNCTION, outputEntities[1].getType());
+    assertEquals(funcName, outputEntities[1].getFunctionName());
+
+    assertEquals(Entity.Type.LOCAL_DIR, outputEntities[2].getType());
+    assertEquals("file:/tmp/udf1.jar", outputEntities[2].getLocation().toString());
+  }
+
+  @Test
+  public void testUdfWithDfsResource() throws Exception {
+    int rc = driver.compile("CREATE FUNCTION default." + funcName
+        + " AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf'"
+        + " USING FILE 'hdfs:///tmp/udf1.jar'");
+    assertEquals(0, rc);
+    WriteEntity[] outputEntities = driver.getPlan().getOutputs().toArray(new WriteEntity[] {});
+    assertEquals(3, outputEntities.length);
+
+    assertEquals(Entity.Type.DATABASE, outputEntities[0].getType());
+    assertEquals("default", outputEntities[0].getDatabase().getName());
+
+    assertEquals(Entity.Type.FUNCTION, outputEntities[1].getType());
+    assertEquals(funcName, outputEntities[1].getFunctionName());
+
+    assertEquals(Entity.Type.DFS_DIR, outputEntities[2].getType());
+    assertEquals("hdfs:/tmp/udf1.jar", outputEntities[2].getLocation().toString());
+  }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index f9b875e..22e5b47 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -22,5 +22,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -81,7 +83,7 @@ private void analyzeCreateFunction(ASTNode ast) throws SemanticException {
         new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
 
-    addEntities(functionName, isTemporaryFunction);
+    addEntities(functionName, isTemporaryFunction, resources);
   }
 
   private void analyzeDropFunction(ASTNode ast) throws SemanticException {
@@ -106,7 +108,7 @@ private void analyzeDropFunction(ASTNode ast) throws SemanticException {
     DropFunctionDesc desc = new DropFunctionDesc(functionName, isTemporaryFunction);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
 
-    addEntities(functionName, isTemporaryFunction);
+    addEntities(functionName, isTemporaryFunction, null);
   }
 
   private ResourceType getResourceType(ASTNode token) throws SemanticException {
@@ -152,8 +154,8 @@ private ResourceType getResourceType(ASTNode token) throws SemanticException {
   /**
    * Add write entities to the semantic analyzer to restrict function creation to privileged users.
    */
-  private void addEntities(String functionName, boolean isTemporaryFunction)
-      throws SemanticException {
+  private void addEntities(String functionName, boolean isTemporaryFunction,
+      List<ResourceUri> resources) throws SemanticException {
     // If the function is being added under a database 'namespace', then add an entity representing
     // the database (only applicable to permanent/metastore functions).
     // We also add a second entity representing the function name.
@@ -183,5 +185,15 @@ private void addEntities(String functionName, boolean isTemporaryFunction)
     // Add the function name as a WriteEntity
     outputs.add(new WriteEntity(database, functionName, Type.FUNCTION,
         WriteEntity.WriteType.DDL_NO_LOCK));
+
+    // Also add each function resource URI as a WriteEntity so that
+    // authorization can restrict access to the jar/file locations.
+    if (resources != null) {
+      for (ResourceUri resource : resources) {
+        String uriPath = resource.getUri();
+        outputs.add(new WriteEntity(new Path(uriPath),
+            FileUtils.isLocalFile(conf, uriPath)));
+      }
+    }
   }
 }
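
Reviewer note: a minimal sketch, not part of the patch, of how the new resource
entities could be consumed downstream, for example by a pre-execution hook. The
hook class name and its registration via hive.exec.pre.hooks are illustrative
assumptions; only the Entity/WriteEntity/HookContext APIs shown are existing
Hive interfaces.

    import org.apache.hadoop.hive.ql.hooks.Entity;
    import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
    import org.apache.hadoop.hive.ql.hooks.HookContext;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    // Hypothetical hook that logs the jar/file locations which CREATE FUNCTION
    // now reports as WriteEntity outputs (Type.DFS_DIR / Type.LOCAL_DIR).
    public class FunctionResourceAuditHook implements ExecuteWithHookContext {
      @Override
      public void run(HookContext hookContext) throws Exception {
        for (WriteEntity output : hookContext.getOutputs()) {
          if (output.getType() == Entity.Type.DFS_DIR
              || output.getType() == Entity.Type.LOCAL_DIR) {
            System.err.println("Function resource: " + output.getLocation());
          }
        }
      }
    }

Such a hook would be enabled by setting hive.exec.pre.hooks to the hook's fully
qualified class name.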