diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index a8411c9..a479d58 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1751,6 +1751,13 @@ "If set to true (default), the logged-in user determines the fair scheduler queue\n" + "for submitted jobs, so that map reduce resource usage can be tracked by user.\n" + "If set to false, all Hive jobs go to the 'hive' user's queue."), + HIVE_SERVER2_BUILTIN_UDF_WHITELIST("hive.server2.builtin.udf.whitelist", "", + "Comma separated list of builtin udf names allowed in queries.\n" + + "An empty whitelist allows all builtin udfs to be executed. " + + " The udf black list takes precedence over udf white list"), + HIVE_SERVER2_BUILTIN_UDF_BLACKLIST("hive.server2.builtin.udf.blacklist", "", + "Comma separated list of udfs names. These udfs will not be allowed in queries." + + " The udf black list takes precedence over udf white list"), HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,reload,compile", "Comma separated list of non-SQL Hive commands users are authorized to execute"), diff --git itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java index e0b6558..5087f87 100644 --- itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java +++ itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcWithMiniHS2.java @@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import java.sql.Connection; import java.sql.DriverManager; @@ -30,12 +31,14 @@ import java.sql.Statement; import java.util.HashMap; import java.util.Map; +import java.util.Set; import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hive.jdbc.miniHS2.MiniHS2; import org.junit.After; import org.junit.AfterClass; @@ -387,6 +390,110 @@ public void testSessionScratchDirs() throws Exception { verifyScratchDir(conf, fs, scratchDirPath, expectedFSPermission, userName, true); } + /** Test UDF whitelist + * - verify default value + * - verify udf allowed with default whitelist + * - verify udf allowed with specific whitelist + * - verify udf disallowed when not in whitelist + * @throws Exception + */ + @Test + public void testUdfWhiteList() throws Exception { + HiveConf testConf = new HiveConf(); + assertTrue(testConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST).isEmpty()); + // verify that udf in default whitelist can be executed + Statement stmt = hs2Conn.createStatement(); + stmt.executeQuery("SELECT substr('foobar', 4) "); + hs2Conn.close(); + miniHS2.stop(); + + // setup whitelist + Set funcNames = FunctionRegistry.getFunctionNames(); + funcNames.remove("reflect"); + String funcNameStr = ""; + for (String funcName : funcNames) { + funcNameStr += "," + funcName; + } + funcNameStr = funcNameStr.substring(1); // remove ',' at beginning + testConf.setVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST, funcNameStr); + miniHS2 = new MiniHS2(testConf); + miniHS2.start(new HashMap()); + + hs2Conn = getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar"); + stmt = hs2Conn.createStatement(); + // verify that udf in whitelist can be executed + stmt.executeQuery("SELECT substr('foobar', 3) "); + + // verify that udf not in whitelist fails + try { + stmt.executeQuery("SELECT reflect('java.lang.String', 'valueOf', 1) "); + fail("reflect() udf invocation should fail"); + } catch (SQLException e) { + 
// expected + } + } + + /** Test UDF blacklist + * - verify default value + * - verify udfs allowed with default blacklist + * - verify udf disallowed when in blacklist + * @throws Exception + */ + @Test + public void testUdfBlackList() throws Exception { + HiveConf testConf = new HiveConf(); + assertTrue(testConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST).isEmpty()); + + Statement stmt = hs2Conn.createStatement(); + // verify that udf in default whitelist can be executed + stmt.executeQuery("SELECT substr('foobar', 4) "); + + miniHS2.stop(); + testConf.setVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST, "reflect"); + miniHS2 = new MiniHS2(testConf); + miniHS2.start(new HashMap()); + hs2Conn = getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar"); + stmt = hs2Conn.createStatement(); + + try { + stmt.executeQuery("SELECT reflect('java.lang.String', 'valueOf', 1) "); + fail("reflect() udf invocation should fail"); + } catch (SQLException e) { + // expected + } + } + + /** Test UDF blacklist overrides whitelist + * @throws Exception + */ + @Test + public void testUdfBlackListOverride() throws Exception { + // setup whitelist + HiveConf testConf = new HiveConf(); + + Set funcNames = FunctionRegistry.getFunctionNames(); + String funcNameStr = ""; + for (String funcName : funcNames) { + funcNameStr += "," + funcName; + } + funcNameStr = funcNameStr.substring(1); // remove ',' at beginning + testConf.setVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST, funcNameStr); + testConf.setVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST, "reflect"); + miniHS2 = new MiniHS2(testConf); + miniHS2.start(new HashMap()); + + hs2Conn = getConnection(miniHS2.getJdbcURL(), System.getProperty("user.name"), "bar"); + Statement stmt = hs2Conn.createStatement(); + + // verify that udf in black list fails even though it's included in whitelist + try { + stmt.executeQuery("SELECT reflect('java.lang.String', 'valueOf', 1) "); + fail("reflect() udf invocation should 
fail"); + } catch (SQLException e) { + // expected + } + } + /** * Tests the creation of the root hdfs scratch dir, which should be writable by all. * diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java index 074255b..bd00bd4 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java @@ -47,6 +47,8 @@ private Class tableFunctionResolver; + private boolean blockedFunction; + public FunctionInfo(boolean isNative, String displayName, GenericUDF genericUDF) { this.isNative = isNative; @@ -190,4 +192,13 @@ public boolean isGenericUDTF() { public boolean isTableFunction() { return null != tableFunctionResolver; } + + public boolean isBlockedFunction() { + return blockedFunction; + } + + public void setBlockedFunction(boolean blockedFunction) { + this.blockedFunction = blockedFunction; + } + } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index 6323387..5071a5c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -29,6 +29,7 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import java.util.regex.Pattern; @@ -668,8 +669,12 @@ public static String getNormalizedFunctionName(String fn) { return functionInfo; } - public static FunctionInfo getFunctionInfo(String functionName) { - return getFunctionInfo(mFunctions, functionName); + public static FunctionInfo getFunctionInfo(String functionName) throws SemanticException { + FunctionInfo functionInfo = getFunctionInfo(mFunctions, functionName); + if (functionInfo != null && functionInfo.isBlockedFunction()) { + throw new SemanticException ("UDF " + functionName + " is not 
allowed"); + } + return functionInfo; } /** @@ -771,7 +776,13 @@ public static Hive getHive() throws HiveException { public static Set getFunctionSynonyms(String funcName) { Set synonyms = new HashSet(); - FunctionInfo funcInfo = getFunctionInfo(funcName); + FunctionInfo funcInfo; + try { + funcInfo = getFunctionInfo(funcName); + } catch (SemanticException e) { + LOG.warn("Failed to load " + funcName); + funcInfo = null; + } if (null == funcInfo) { return synonyms; } @@ -1246,7 +1257,7 @@ public static void unregisterTemporaryUDF(String functionName) throws HiveExcept } } - public static GenericUDAFResolver getGenericUDAFResolver(String functionName) { + public static GenericUDAFResolver getGenericUDAFResolver(String functionName) throws SemanticException { if (LOG.isDebugEnabled()) { LOG.debug("Looking up GenericUDAF: " + functionName); } @@ -1543,16 +1554,18 @@ public static Method getMethodInternal(Class udfClass, List mlist, bo /** * A shortcut to get the "index" GenericUDF. This is used for getting elements * out of array and getting values out of map. + * @throws SemanticException */ public static GenericUDF getGenericUDFForIndex() { - return FunctionRegistry.getFunctionInfo("index").getGenericUDF(); + return FunctionRegistry.getFunctionInfo(mFunctions, "index").getGenericUDF(); } /** * A shortcut to get the "and" GenericUDF. + * @throws SemanticException */ public static GenericUDF getGenericUDFForAnd() { - return FunctionRegistry.getFunctionInfo("and").getGenericUDF(); + return FunctionRegistry.getFunctionInfo(mFunctions, "and").getGenericUDF(); } /** @@ -1924,8 +1937,9 @@ public static WindowFunctionInfo getWindowFunctionInfo(String functionName) { * name of function * @return true if a GenericUDF or GenericUDAF exists for this name and implyOrder is true, false * otherwise. 
+ * @throws SemanticException */ - public static boolean impliesOrder(String functionName) { + public static boolean impliesOrder(String functionName) throws SemanticException { FunctionInfo info = getFunctionInfo(functionName); if (info != null) { @@ -1951,13 +1965,13 @@ static private void addFunctionInfoToWindowFunctions(String functionName, windowFunctions.put(functionName.toLowerCase(), wInfo); } - public static boolean isTableFunction(String name) + public static boolean isTableFunction(String name) throws SemanticException { FunctionInfo tFInfo = getFunctionInfo(name); return tFInfo != null && !tFInfo.isInternalTableFunction() && tFInfo.isTableFunction(); } - public static TableFunctionResolver getTableFunctionResolver(String name) + public static TableFunctionResolver getTableFunctionResolver(String name) throws SemanticException { FunctionInfo tfInfo = getFunctionInfo(name); if(tfInfo.isTableFunction()) { @@ -1966,7 +1980,7 @@ public static TableFunctionResolver getTableFunctionResolver(String name) return null; } - public static TableFunctionResolver getWindowingTableFunction() + public static TableFunctionResolver getWindowingTableFunction() throws SemanticException { return getTableFunctionResolver(WINDOWING_TABLE_FUNCTION); } @@ -1993,8 +2007,9 @@ public static void registerTableFunction(String name, ClasswhiteList, + List blackList) { + for ( Entry funcEntry : mFunctions.entrySet()) { + funcEntry.getValue().setBlockedFunction( + isUdfBlocked(funcEntry.getKey(), whiteList, blackList)); + } + } + + /** + * Check if the function belongs to whitelist or blacklist + * @param functionName + * @param whiteList + * @param blackList + * @return + */ + private static boolean isUdfBlocked(String functionName, + List whiteList, List blackList) { + boolean inWhiteList = false; + boolean inBlackList = false; + + if (whiteList.isEmpty()) { + inWhiteList = true; + } else { + for (String allowedFunction : whiteList) { + if 
(functionName.equalsIgnoreCase(allowedFunction)) { + inWhiteList = true; + break; + } + } + } + + for (String blockedFunction : blackList) { + if (functionName.equalsIgnoreCase(blockedFunction)) { + inBlackList = true; + break; + } + } + return !inWhiteList || inBlackList; + } } diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java index dbc9bb2..54b935f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java @@ -82,6 +82,7 @@ import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor; import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.plan.AggregationDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; @@ -756,7 +757,13 @@ public static boolean isCustomUDF(ExprNodeGenericFuncDesc expr) { if (udfName == null) { return false; } - FunctionInfo funcInfo = FunctionRegistry.getFunctionInfo(udfName); + FunctionInfo funcInfo; + try { + funcInfo = FunctionRegistry.getFunctionInfo(udfName); + } catch (SemanticException e) { + LOG.warn("Failed to load " + udfName, e); + funcInfo = null; + } if (funcInfo == null) { return false; } diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java index 7f52c29..0994c95 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/optiq/translator/SqlFunctionConverter.java @@ -32,6 +32,7 @@ import org.apache.hadoop.hive.ql.parse.ASTNode; import 
org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.ParseDriver; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.udf.SettableUDF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge; @@ -111,13 +112,24 @@ public static GenericUDF getHiveUDF(SqlOperator op, RelDataType dt, int argsLeng name = FunctionRegistry.UNARY_MINUS_FUNC_NAME; } } - FunctionInfo hFn = name != null ? FunctionRegistry.getFunctionInfo(name) : null; + FunctionInfo hFn; + try { + hFn = name != null ? FunctionRegistry.getFunctionInfo(name) : null; + } catch (SemanticException e) { + LOG.warn("Failed to load udf " + name, e); + hFn = null; + } if (hFn == null) - hFn = handleExplicitCast(op, dt); + try { + hFn = handleExplicitCast(op, dt); + } catch (SemanticException e) { + LOG.warn("Failed to load udf " + name, e); + hFn = null; + } return hFn == null ? null : hFn.getGenericUDF(); } - private static FunctionInfo handleExplicitCast(SqlOperator op, RelDataType dt) { + private static FunctionInfo handleExplicitCast(SqlOperator op, RelDataType dt) throws SemanticException { FunctionInfo castUDF = null; if (op.kind == SqlKind.CAST) { @@ -283,7 +295,13 @@ private static String getName(GenericUDF hiveUDF) { private void registerFunction(String name, SqlOperator optiqFn, HiveToken hiveToken) { reverseOperatorMap.put(optiqFn, name); - FunctionInfo hFn = FunctionRegistry.getFunctionInfo(name); + FunctionInfo hFn; + try { + hFn = FunctionRegistry.getFunctionInfo(name); + } catch (SemanticException e) { + LOG.warn("Failed to load udf " + name, e); + hFn = null; + } if (hFn != null) { String hFnName = getName(hFn.getGenericUDF()); hiveToOptiq.put(hFnName, optiqFn); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index b105424..31852e6 100644 --- 
ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -2993,9 +2993,9 @@ private void analyzeMetastoreCheck(CommonTree ast) throws SemanticException { } private static ExprNodeGenericFuncDesc makeBinaryPredicate( - String fn, ExprNodeDesc left, ExprNodeDesc right) { - return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, - FunctionRegistry.getFunctionInfo(fn).getGenericUDF(), Lists.newArrayList(left, right)); + String fn, ExprNodeDesc left, ExprNodeDesc right) throws SemanticException { + return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, + FunctionRegistry.getFunctionInfo(fn).getGenericUDF(), Lists.newArrayList(left, right)); } /** diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java index 22e5b47..01b97c8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java @@ -59,7 +59,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException { analyzeCreateFunction(ast); } if (ast.getToken().getType() == HiveParser.TOK_DROPFUNCTION) { - analyzeDropFunction(ast); + analyzeDropFunction(ast); } LOG.info("analyze done"); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java index 5decffb..350a961 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java @@ -690,7 +690,12 @@ public static String getFunctionText(ASTNode expr, boolean isFunction) { static ExprNodeDesc getFuncExprNodeDescWithUdfData(String udfName, TypeInfo typeInfo, ExprNodeDesc... 
children) throws UDFArgumentException { - FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName); + FunctionInfo fi; + try { + fi = FunctionRegistry.getFunctionInfo(udfName); + } catch (SemanticException e) { + throw new UDFArgumentException(e); + } if (fi == null) { throw new UDFArgumentException(udfName + " not found."); } diff --git ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java index 93981fa..c0fd4b3 100644 --- ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java +++ ql/src/test/org/apache/hadoop/hive/metastore/TestMetastoreExpr.java @@ -189,11 +189,11 @@ public ExprNodeGenericFuncDesc build() throws Exception { return (ExprNodeGenericFuncDesc)stack.pop(); } - public ExprBuilder pred(String name, int args) { + public ExprBuilder pred(String name, int args) throws Exception { return fn(name, TypeInfoFactory.booleanTypeInfo, args); } - private ExprBuilder fn(String name, TypeInfo ti, int args) { + private ExprBuilder fn(String name, TypeInfo ti, int args) throws Exception { List children = new ArrayList(); for (int i = 0; i < args; ++i) { children.add(stack.pop()); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java index 0eb7c5a..41862e6 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java @@ -181,7 +181,7 @@ private static void fileDiff(String datafile, String testdir) throws Exception { } } - private FilterDesc getTestFilterDesc(String column) { + private FilterDesc getTestFilterDesc(String column) throws Exception { ArrayList children1 = new ArrayList(); children1.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, column, "", false)); @@ -208,7 +208,7 @@ private FilterDesc getTestFilterDesc(String column) { } @SuppressWarnings("unchecked") - private void 
populateMapPlan1(Table src) { + private void populateMapPlan1(Table src) throws Exception { Operator op2 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator + "mapplan1.out"), Utilities.defaultTd, true)); @@ -219,7 +219,7 @@ private void populateMapPlan1(Table src) { } @SuppressWarnings("unchecked") - private void populateMapPlan2(Table src) { + private void populateMapPlan2(Table src) throws Exception { Operator op3 = OperatorFactory.get(new FileSinkDesc(new Path(tmpdir + File.separator + "mapplan2.out"), Utilities.defaultTd, false)); @@ -267,7 +267,7 @@ private void populateMapRedPlan1(Table src) throws SemanticException { } @SuppressWarnings("unchecked") - private void populateMapRedPlan2(Table src) throws SemanticException { + private void populateMapRedPlan2(Table src) throws Exception { ArrayList outputColumns = new ArrayList(); for (int i = 0; i < 2; i++) { outputColumns.add("_col" + i); @@ -423,7 +423,7 @@ private void populateMapRedPlan5(Table src) throws SemanticException { } @SuppressWarnings("unchecked") - private void populateMapRedPlan6(Table src) throws SemanticException { + private void populateMapRedPlan6(Table src) throws Exception { // map-side work ArrayList outputColumns = new ArrayList(); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java index 1bb6eaf..6db3c19 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java @@ -122,12 +122,12 @@ public void testExprNodeColumnEvaluator() throws Throwable { } } - private static ExprNodeDesc getListIndexNode(ExprNodeDesc node, int index) { + private static ExprNodeDesc getListIndexNode(ExprNodeDesc node, int index) throws Exception { return getListIndexNode(node, new ExprNodeConstantDesc(index)); } private static ExprNodeDesc getListIndexNode(ExprNodeDesc node, - ExprNodeDesc 
index) { + ExprNodeDesc index) throws Exception { ArrayList children = new ArrayList(2); children.add(node); children.add(index); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java index 655c3d0..068bdee 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestFunctionRegistry.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.common.type.HiveVarchar; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.io.DateWritable; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -379,7 +380,7 @@ public void testGetTypeInfoForPrimitiveCategory() { protected void tearDown() { } - public void testIsRankingFunction() { + public void testIsRankingFunction() throws Exception { Assert.assertTrue(FunctionRegistry.isRankingFunction("rank")); Assert.assertTrue(FunctionRegistry.isRankingFunction("dense_rank")); Assert.assertTrue(FunctionRegistry.isRankingFunction("percent_rank")); @@ -387,7 +388,7 @@ public void testIsRankingFunction() { Assert.assertFalse(FunctionRegistry.isRankingFunction("min")); } - public void testImpliesOrder() { + public void testImpliesOrder() throws Exception { Assert.assertTrue(FunctionRegistry.impliesOrder("rank")); Assert.assertTrue(FunctionRegistry.impliesOrder("dense_rank")); Assert.assertTrue(FunctionRegistry.impliesOrder("percent_rank")); diff --git service/src/java/org/apache/hive/service/cli/CLIService.java service/src/java/org/apache/hive/service/cli/CLIService.java index f5751f1..1ef4f77 100644 --- service/src/java/org/apache/hive/service/cli/CLIService.java +++ service/src/java/org/apache/hive/service/cli/CLIService.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.shims.ShimLoader; @@ -46,6 +47,8 @@ import org.apache.hive.service.cli.thrift.TProtocolVersion; import org.apache.hive.service.server.HiveServer2; +import com.google.common.collect.Lists; + /** * CLIService. * @@ -104,9 +107,22 @@ public synchronized void init(HiveConf hiveConf) { } } } + setupBlockedUdfs(); super.init(hiveConf); } + private void setupBlockedUdfs() { + String whiteListVal = hiveConf.getVar(ConfVars.HIVE_SERVER2_BUILTIN_UDF_WHITELIST); + List whiteList = Lists.newArrayList(); + if (!whiteListVal.isEmpty()) { + whiteList = Lists.newArrayList(whiteListVal.split(",")); + } + List blackList = Lists.newArrayList(hiveConf.getVar( + ConfVars.HIVE_SERVER2_BUILTIN_UDF_BLACKLIST).split(",")); + + FunctionRegistry.setupPermissionsForBuiltinUDFs(whiteList, blackList); + } + public UserGroupInformation getServiceUGI() { return this.serviceUGI; }