diff --git cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
index caf5200..3e7000d 100644
--- cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
+++ cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
@@ -537,7 +537,7 @@ public void processSelectDatabase(CliSessionState ss) throws IOException {
     // We add Hive function names
     // For functions that aren't infix operators, we add an open
     // parenthesis at the end.
-    for (String s : FunctionRegistry.getFunctionNames()) {
+    for (String s : FunctionRegistry.getFunctionNames(".*")) {
       if (s.matches("[a-z_]+")) {
         sc.addCandidateString(s + "(");
       } else {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 96becfe..9799e9e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -174,6 +174,9 @@
   static Map<String, WindowFunctionInfo> windowFunctions =
       Collections.synchronizedMap(new LinkedHashMap<String, WindowFunctionInfo>());
 
+  private static boolean haveSearchedMetastore = false;
+  private static final Set<String> metastoreFunctions = new HashSet<String>();
+
   static {
     registerGenericUDF("concat", GenericUDFConcat.class);
     registerUDF("substr", UDFSubstr.class, false);
@@ -663,8 +666,12 @@ public static FunctionInfo getFunctionInfo(String functionName) {
   }
 
   private static Set<String> getFunctionNames(boolean searchMetastore) {
-    Set<String> functionNames = mFunctions.keySet();
-    if (searchMetastore) {
+    Set<String> functionNames = new HashSet<String>();
+    for (String func : mFunctions.keySet()) {
+      functionNames.add(func);
+    }
+
+    if (searchMetastore && !haveSearchedMetastore) {
       functionNames = new HashSet<String>(functionNames);
       try {
         Hive db = getHive();
@@ -673,14 +680,17 @@
         for (String dbName : dbNames) {
           List<String> funcNames = db.getFunctions(dbName, "*");
           for (String funcName : funcNames) {
-            functionNames.add(FunctionUtils.qualifyFunctionName(funcName, dbName));
+            metastoreFunctions.add(FunctionUtils.qualifyFunctionName(funcName, dbName));
           }
         }
       } catch (Exception e) {
         LOG.error(e);
         // Continue on, we can still return the functions we've gotten to this point.
       }
+      haveSearchedMetastore = true;
     }
+
+    functionNames.addAll(metastoreFunctions);
     return functionNames;
   }
 
@@ -710,6 +720,11 @@ public static Hive getHive() throws HiveException {
         funcNames.add(funcName);
       }
     }
+    for (String funcName : metastoreFunctions) {
+      if (funcPattern.matcher(funcName).matches()) {
+        funcNames.add(funcName);
+      }
+    }
     return funcNames;
   }
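
For reference, the FunctionRegistry change boils down to a lazy, one-time cache of metastore function names that is merged into every later lookup. What follows is a minimal, self-contained sketch of that pattern in plain Java, not Hive code: the FunctionNameCache class, the fetchFromMetastore helper, and the sample names are hypothetical stand-ins for mFunctions, db.getFunctions(dbName, "*"), and the real registry contents.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

public class FunctionNameCache {

  // Stands in for mFunctions.keySet(): built-in names registered up front.
  private static final Set<String> builtIns =
      new HashSet<String>(Arrays.asList("concat", "substr"));

  // Populated at most once, on the first lookup that asks for metastore names.
  private static boolean haveSearchedMetastore = false;
  private static final Set<String> metastoreFunctions = new HashSet<String>();

  // Hypothetical stand-in for walking db.getFunctions(dbName, "*") over all databases.
  private static List<String> fetchFromMetastore() {
    return Arrays.asList("mydb.my_udf", "default.collect_udf");
  }

  public static Set<String> getFunctionNames(boolean searchMetastore) {
    Set<String> names = new HashSet<String>(builtIns);
    if (searchMetastore && !haveSearchedMetastore) {
      try {
        metastoreFunctions.addAll(fetchFromMetastore());
      } catch (Exception e) {
        // A metastore failure should not break lookups; keep what we have.
      }
      haveSearchedMetastore = true;   // never query the metastore again
    }
    names.addAll(metastoreFunctions); // cached names are merged on every call
    return names;
  }

  // Pattern-based lookup, mirroring the getFunctionNames(".*") call added in CliDriver.
  public static Set<String> getFunctionNames(String funcPatternStr) {
    Pattern funcPattern = Pattern.compile(funcPatternStr);
    Set<String> matches = new HashSet<String>();
    for (String name : getFunctionNames(true)) {
      if (funcPattern.matcher(name).matches()) {
        matches.add(name);
      }
    }
    return matches;
  }

  public static void main(String[] args) {
    System.out.println(getFunctionNames(".*"));   // built-ins plus cached metastore names
    System.out.println(getFunctionNames("my.*")); // only names matching the pattern
  }
}

Note the trade-off this pattern makes: the metastore is consulted exactly once, so names created after the first lookup are not visible until the process restarts, while repeated lookups (for example, tab completion) stay cheap.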