diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 49eb83f..28d0e1c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2452,9 +2452,6 @@ private String fetchFilesNotInLocalFilesystem(String cmd) {
     if (SessionState.canDownloadResource(progName)) {
       String filePath = ss.add_resource(ResourceType.FILE, progName, true);
-      if (filePath == null) {
-        throw new RuntimeException("Could not download the resource: " + progName);
-      }
       Path p = new Path(filePath);
       String fileName = p.getName();
       String scriptArgs = getScriptArgs(cmd);
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
index f3f3619..4b2c1ad 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
@@ -20,6 +20,7 @@
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
@@ -236,15 +237,19 @@ public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF,
     if (requiredJars != null) {
       SessionState.ResourceType t = SessionState.find_resource_type("JAR");
-      for (String jarPath : requiredJars) {
-        ss.add_resource(t, jarPath);
+      try {
+        ss.add_resources(t, Arrays.asList(requiredJars));
+      } catch (Exception e) {
+        throw new UDFArgumentException(e);
       }
     }
     if (requiredFiles != null) {
       SessionState.ResourceType t = SessionState.find_resource_type("FILE");
-      for (String filePath : requiredFiles) {
-        ss.add_resource(t, filePath);
+      try {
+        ss.add_resources(t, Arrays.asList(requiredFiles));
+      } catch (Exception e) {
+        throw new UDFArgumentException(e);
       }
     }
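Note (reviewer sketch, not part of the patch): the two hunks above replace per-element add_resource calls with a single batch add_resources call, and a failure on any entry now surfaces as a UDFArgumentException instead of being silently dropped. A minimal, self-contained illustration of that wrapping pattern; ArgException and addResources are hypothetical stand-ins for the real Hive types:

    import java.util.Arrays;
    import java.util.List;

    public class BatchRegisterSketch {
      // stand-in for UDFArgumentException: wraps whatever the batch call threw
      static class ArgException extends Exception {
        ArgException(Throwable cause) { super(cause); }
      }

      // stand-in for ss.add_resources(t, ...): rejects the whole batch on one bad entry
      static void addResources(List<String> jars) {
        for (String jar : jars) {
          if (jar.isEmpty()) {
            throw new IllegalArgumentException(jar + " does not exist");
          }
        }
      }

      public static void main(String[] args) throws ArgException {
        String[] requiredJars = {"udf-deps.jar", "udf-impl.jar"};
        try {
          addResources(Arrays.asList(requiredJars)); // one call for the whole array
        } catch (Exception e) {
          throw new ArgException(e);                 // mirrors: throw new UDFArgumentException(e)
        }
      }
    }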
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
index 5ed1e6c..0532666 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import java.util.Arrays;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -50,14 +52,12 @@ public CommandProcessorResponse run(String command) {
           + "] <value> [<value>]*");
       return new CommandProcessorResponse(1);
     }
-    for (int i = 1; i < tokens.length; i++) {
-      String resourceFile = ss.add_resource(t, tokens[i]);
-      if(resourceFile == null){
-        String errMsg = tokens[i]+" does not exist.";
-        return new CommandProcessorResponse(1,errMsg,null);
-      }
+    try {
+      ss.add_resources(t,
+          Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
+    } catch (Exception e) {
+      return new CommandProcessorResponse(1, e.getMessage(), null);
     }
-
     return new CommandProcessorResponse(0);
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
index 83fadeb..bfac5f8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import java.util.Arrays;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -52,11 +54,9 @@ public CommandProcessorResponse run(String command) {
     }
 
     if (tokens.length >= 2) {
-      for (int i = 1; i < tokens.length; i++) {
-        ss.delete_resource(t, tokens[i]);
-      }
+      ss.delete_resources(t, Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
     } else {
-      ss.delete_resource(t);
+      ss.delete_resources(t);
     }
 
     return new CommandProcessorResponse(0);
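Note (reviewer sketch, not part of the patch): both processors now hand the whole argument list over in one call; the slice below shows the Arrays.copyOfRange idiom they share. The token values are made up for illustration:

    import java.util.Arrays;
    import java.util.List;

    public class TokenSliceSketch {
      public static void main(String[] args) {
        // as produced by the command tokenizer: command word first, resources after
        String[] tokens = {"ADD", "one.txt", "two.txt"};
        // drop tokens[0], keep the rest as one list for add_resources/delete_resources
        List<String> resources = Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length));
        System.out.println(resources); // prints [one.txt, two.txt]
      }
    }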
diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index f8fc054..7feba1d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -26,6 +26,7 @@
 import java.net.URI;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -560,26 +561,6 @@ public static LogHelper getConsole() {
     return _console;
   }
 
-  public static String validateFile(Set<String> curFiles, String newFile) {
-    SessionState ss = SessionState.get();
-    LogHelper console = getConsole();
-    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
-
-    try {
-      if (Utilities.realFile(newFile, conf) != null) {
-        return newFile;
-      } else {
-        console.printError(newFile + " does not exist");
-        return null;
-      }
-    } catch (IOException e) {
-      console.printError("Unable to validate " + newFile + "\nException: "
-          + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return null;
-    }
-  }
-
   /**
    *
    * @return username from current SessionState authenticator. username will be
@@ -593,27 +574,42 @@ public static String getUserFromAuthenticator() {
     return null;
   }
 
-  public static boolean registerJar(String newJar) {
+  static void validateFiles(List<String> newFiles) throws IllegalArgumentException {
+    SessionState ss = SessionState.get();
+    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
+
+    LogHelper console = getConsole();
+    for (String newFile : newFiles) {
+      try {
+        if (Utilities.realFile(newFile, conf) == null) {
+          String message = newFile + " does not exist";
+          throw new IllegalArgumentException(message);
+        }
+      } catch (IOException e) {
+        String message = "Unable to validate " + newFile;
+        throw new IllegalArgumentException(message, e);
+      }
+    }
+  }
+
+  static void registerJars(List<String> newJars) throws IllegalArgumentException {
     LogHelper console = getConsole();
     try {
       ClassLoader loader = Thread.currentThread().getContextClassLoader();
-      ClassLoader newLoader = Utilities.addToClassPath(loader, StringUtils.split(newJar, ","));
+      ClassLoader newLoader = Utilities.addToClassPath(loader, newJars.toArray(new String[0]));
       Thread.currentThread().setContextClassLoader(newLoader);
       SessionState.get().getConf().setClassLoader(newLoader);
-      console.printInfo("Added " + newJar + " to class path");
-      return true;
+      console.printInfo("Added " + newJars + " to class path");
     } catch (Exception e) {
-      console.printError("Unable to register " + newJar + "\nException: "
-          + e.getMessage(), "\n"
-          + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      return false;
+      String message = "Unable to register " + newJars;
+      throw new IllegalArgumentException(message, e);
     }
   }
 
-  public static boolean unregisterJar(String jarsToUnregister) {
+  static boolean unregisterJar(List<String> jarsToUnregister) {
     LogHelper console = getConsole();
     try {
-      Utilities.removeFromClassPath(StringUtils.split(jarsToUnregister, ","));
+      Utilities.removeFromClassPath(jarsToUnregister.toArray(new String[0]));
       console.printInfo("Deleted " + jarsToUnregister + " from class path");
       return true;
     } catch (Exception e) {
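Note (reviewer sketch, not part of the patch): validateFiles replaces the old print-and-return-null contract with fail-fast exceptions, which is what lets the callers above give all-or-nothing semantics. A self-contained sketch of that contract, using java.io.File as a stand-in for Utilities.realFile:

    import java.io.File;
    import java.util.Arrays;
    import java.util.List;

    public class ValidateFilesSketch {
      // same shape as the patch: throw on the first entry that cannot be resolved
      static void validateFiles(List<String> newFiles) {
        for (String newFile : newFiles) {
          if (!new File(newFile).exists()) { // stand-in for Utilities.realFile(newFile, conf)
            throw new IllegalArgumentException(newFile + " does not exist");
          }
        }
      }

      public static void main(String[] args) {
        try {
          validateFiles(Arrays.asList("build.xml", "no_such_file.txt"));
        } catch (IllegalArgumentException e) {
          // under the new contract nothing has been registered when this fires
          System.err.println(e.getMessage());
        }
      }
    }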
@@ -625,65 +621,29 @@ public static boolean unregisterJar(String jarsToUnregister) {
   }
 
   /**
-   * ResourceHook.
-   *
-   */
-  public static interface ResourceHook {
-    String preHook(Set<String> cur, String s);
-
-    boolean postHook(Set<String> cur, String s);
-  }
-
-  /**
    * ResourceType.
    *
    */
   public static enum ResourceType {
-    FILE(new ResourceHook() {
-      @Override
-      public String preHook(Set<String> cur, String s) {
-        return validateFile(cur, s);
-      }
-
-      @Override
-      public boolean postHook(Set<String> cur, String s) {
-        return true;
-      }
-    }),
-
-    JAR(new ResourceHook() {
-      @Override
-      public String preHook(Set<String> cur, String s) {
-        String newJar = validateFile(cur, s);
-        if (newJar != null) {
-          return (registerJar(newJar) ? newJar : null);
-        } else {
-          return null;
-        }
-      }
-
-      @Override
-      public boolean postHook(Set<String> cur, String s) {
-        return unregisterJar(s);
-      }
-    }),
+    FILE,
 
-    ARCHIVE(new ResourceHook() {
+    JAR {
       @Override
-      public String preHook(Set<String> cur, String s) {
-        return validateFile(cur, s);
+      public void preHook(Set<String> cur, List<String> s) throws IllegalArgumentException {
+        super.preHook(cur, s);
+        registerJars(s);
       }
-
       @Override
-      public boolean postHook(Set<String> cur, String s) {
-        return true;
+      public void postHook(Set<String> cur, List<String> s) {
+        unregisterJar(s);
       }
-    });
+    },
+    ARCHIVE;
 
-    public ResourceHook hook;
-
-    ResourceType(ResourceHook hook) {
-      this.hook = hook;
+    public void preHook(Set<String> cur, List<String> s) throws IllegalArgumentException {
+      validateFiles(s);
+    }
+    public void postHook(Set<String> cur, List<String> s) {
     }
   };
@@ -713,33 +673,47 @@ public static ResourceType find_resource_type(String s) {
 
   private final HashMap<ResourceType, Set<String>> resource_map = new HashMap<ResourceType, Set<String>>();
 
-  public String add_resource(ResourceType t, String value) {
-    // By default don't convert to unix
+  public String add_resource(ResourceType t, String value) throws RuntimeException {
     return add_resource(t, value, false);
   }
 
-  public String add_resource(ResourceType t, String value, boolean convertToUnix) {
-    try {
-      value = downloadResource(value, convertToUnix);
-    } catch (Exception e) {
-      getConsole().printError(e.getMessage());
+  public String add_resource(ResourceType t, String value, boolean convertToUnix)
+      throws RuntimeException {
+    List<String> added = add_resources(t, Arrays.asList(value), convertToUnix);
+    if (added == null || added.isEmpty()) {
       return null;
     }
+    return added.get(0);
+  }
 
+  public List<String> add_resources(ResourceType t, List<String> values)
+      throws RuntimeException {
+    // By default don't convert to unix
+    return add_resources(t, values, false);
+  }
+
+  public List<String> add_resources(ResourceType t, List<String> values, boolean convertToUnix)
+      throws RuntimeException {
     Set<String> resourceMap = getResourceMap(t);
-    String fnlVal = value;
-    if (t.hook != null) {
-      fnlVal = t.hook.preHook(resourceMap, value);
-      if (fnlVal == null) {
-        return fnlVal;
+    List<String> localized = new ArrayList<String>();
+    try {
+      for (String value : values) {
+        localized.add(downloadResource(value, convertToUnix));
       }
+
+      t.preHook(resourceMap, localized);
+
+    } catch (RuntimeException e) {
+      getConsole().printError(e.getMessage(), "\n"
+          + org.apache.hadoop.util.StringUtils.stringifyException(e));
+      throw e;
     }
-    getConsole().printInfo("Added resource: " + fnlVal);
-    resourceMap.add(fnlVal);
-    addedResource = true;
-    return fnlVal;
+    getConsole().printInfo("Added resources: " + values);
+    resourceMap.addAll(localized);
+
+    return localized;
   }
 
   public void add_builtin_resource(ResourceType t, String value) {
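Note (reviewer sketch, not part of the patch): the ResourceHook interface plus per-constant anonymous classes collapse into enum constants with bodies here; FILE and ARCHIVE inherit the default preHook/postHook, while JAR overrides them and still calls super to validate first. A compilable miniature of that dispatch, with println standing in for the real validate/register work:

    import java.util.Arrays;
    import java.util.List;

    public class EnumHookSketch {
      enum ResourceType {
        FILE,
        JAR {
          @Override
          void preHook(List<String> s) {
            super.preHook(s);                      // validate first, as in the patch
            System.out.println("registered " + s); // then the JAR-only classpath step
          }
        },
        ARCHIVE;

        void preHook(List<String> s) {             // shared default for FILE/ARCHIVE
          System.out.println("validated " + s);
        }
      }

      public static void main(String[] args) {
        ResourceType.JAR.preHook(Arrays.asList("x.jar"));
        // prints: validated [x.jar]
        //         registered [x.jar]
      }
    }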
@@ -799,16 +773,12 @@ private String downloadResource(String value, boolean convertToUnix) {
     return value;
   }
 
-  public boolean delete_resource(ResourceType t, String value) {
-    if (resource_map.get(t) == null) {
-      return false;
+  public void delete_resources(ResourceType t, List<String> value) {
+    Set<String> resources = resource_map.get(t);
+    if (resources != null && !resources.isEmpty()) {
+      t.postHook(resources, value);
+      resources.removeAll(value);
     }
-    if (t.hook != null) {
-      if (!t.hook.postHook(resource_map.get(t), value)) {
-        return false;
-      }
-    }
-    return (resource_map.get(t).remove(value));
   }
 
   public Set<String> list_resource(ResourceType t, List<String> filter) {
@@ -829,11 +799,10 @@ public boolean delete_resource(ResourceType t, String value) {
     }
   }
 
-  public void delete_resource(ResourceType t) {
-    if (resource_map.get(t) != null) {
-      for (String value : resource_map.get(t)) {
-        delete_resource(t, value);
-      }
+  public void delete_resources(ResourceType t) {
+    Set<String> resources = resource_map.get(t);
+    if (resources != null && !resources.isEmpty()) {
+      delete_resources(t, new ArrayList<String>(resources));
       resource_map.remove(t);
     }
   }
diff --git ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
index 7df7ecc..ef0052f 100644
--- ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
+++ ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
@@ -101,7 +101,7 @@ public RegisterJarRunnable(String jar, SessionState ss) {
 
     public void run() {
       SessionState.start(ss);
-      SessionState.registerJar(jar);
+      SessionState.registerJars(Arrays.asList(jar));
      loader = Thread.currentThread().getContextClassLoader();
     }
   }
diff --git ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
index 06a49e4..3843428 100644
--- ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
+++ ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: create function lookup as 'org.apache.hadoop.hive.ql.udf.UDFFile
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 nonexistent_file.txt does not exist
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Unable to load FILE nonexistent_file.txt
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. nonexistent_file.txt does not exist
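Note (reviewer sketch, not part of the patch): end to end, the single-resource entry points survive as thin wrappers over the batch ones, so existing callers keep compiling. A sketch of that delegation with the download/hook steps stubbed out; addResource/addResources are hypothetical stand-ins for the patched SessionState methods:

    import java.util.Arrays;
    import java.util.List;

    public class DelegationSketch {
      // mirrors the patched add_resource(t, value, convertToUnix)
      static String addResource(String value) {
        List<String> added = addResources(Arrays.asList(value));
        if (added == null || added.isEmpty()) {
          return null;
        }
        return added.get(0);
      }

      // stand-in for add_resources: localize every value, then book-keep all of them
      static List<String> addResources(List<String> values) {
        return values; // download + preHook + resourceMap.addAll elided
      }

      public static void main(String[] args) {
        System.out.println(addResource("script.py")); // prints script.py
      }
    }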