diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 7f4afd9..23404da 100644 --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -572,6 +572,11 @@ HIVEJAR("hive.jar.path", "", ""), HIVEAUXJARS("hive.aux.jars.path", "", ""), + // reloadable jars + HIVERELOADABLEJARS("hive.reloadable.aux.jars.path", "", + "Jars can be renewed by executing the reload command, and these jars can be " + + "used as auxiliary classes, e.g. for creating a UDF or SerDe."), + // hive added files and jars HIVEADDEDFILES("hive.added.files.path", "", ""), HIVEADDEDJARS("hive.added.jars.path", "", ""), @@ -1576,7 +1581,7 @@ HIVE_SERVER2_SSL_KEYSTORE_PATH("hive.server2.keystore.path", "", ""), HIVE_SERVER2_SSL_KEYSTORE_PASSWORD("hive.server2.keystore.password", "", ""), - HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,compile", + HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,reload,compile", "Comma separated list of non-SQL Hive commands users are authorized to execute"), HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java index 93a03ad..4fdb5c9 100644 --- hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java +++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.Partition; @@ -425,7 +426,7 @@ public static HiveStorageHandler getStorageHandler(Configuration conf, try { Class handlerClass = (Class) Class - .forName(storageHandler, true, JavaUtils.getClassLoader()); + .forName(storageHandler, true, Utilities.getSessionSpecifiedClassLoader()); return (HiveStorageHandler) ReflectionUtils.newInstance( handlerClass, conf); } catch (ClassNotFoundException e) { diff --git hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java index f25039d..ccad819 100644 --- hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java +++ hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java @@ -22,8 +22,8 @@ import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.metastore.api.PartitionEventType; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hive.hcatalog.common.HCatException; import org.apache.hive.hcatalog.data.schema.HCatFieldSchema; @@ -49,7 +49,7 @@ public static HCatClient create(Configuration conf) throws HCatException { HCatClientHMSImpl.class.getName()); try { Class clientClass = Class.forName(className, - true, JavaUtils.getClassLoader()).asSubclass( + true, Utilities.getSessionSpecifiedClassLoader()).asSubclass( HCatClient.class); client = (HCatClient) clientClass.newInstance(); }
catch (ClassNotFoundException e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java index 5924bcf..928be42 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java @@ -53,7 +53,7 @@ public void initialize(Configuration hconf, Properties props) throws HiveExcepti private SerDe initializeSerde(Configuration conf, Properties props) throws Exception { String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE); Class serdeClass = Class.forName(serdeName, true, - JavaUtils.getClassLoader()).asSubclass(SerDe.class); + Utilities.getSessionSpecifiedClassLoader()).asSubclass(SerDe.class); // cast only needed for Hadoop 0.17 compatibility SerDe serde = ReflectionUtils.newInstance(serdeClass, null); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java index 0c6a3d4..dbb8eaf 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java @@ -39,7 +39,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Function; @@ -562,7 +561,7 @@ private static FunctionInfo getFunctionInfoFromMetastore(String functionName) { return null; } - Class udfClass = Class.forName(func.getClassName(), true, JavaUtils.getClassLoader()); + Class udfClass = Class.forName(func.getClassName(), true, Utilities.getSessionSpecifiedClassLoader()); if (registerTemporaryFunction(functionName, udfClass)) { ret = mFunctions.get(functionName); } else { @@ -610,7 +609,7 @@ private static void checkFunctionClass(CommonFunctionInfo cfi) throws ClassNotFo // Even if we have a reference to the class (which will be the case for GenericUDFs), // the classloader may not be able to resolve the class, which would mean reflection-based // methods would fail such as for plan deserialization. Make sure this works too. 
- Class.forName(udfClass.getName(), true, JavaUtils.getClassLoader()); + Class.forName(udfClass.getName(), true, Utilities.getSessionSpecifiedClassLoader()); } private static void loadFunctionResourcesIfNecessary(String functionName, CommonFunctionInfo cfi) { diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java index bd45df1..569c125 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java @@ -21,7 +21,6 @@ import static org.apache.hadoop.util.StringUtils.stringifyException; import java.io.IOException; -import java.net.URI; import java.util.List; import org.apache.commons.logging.Log; @@ -33,10 +32,8 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType; import org.apache.hadoop.hive.metastore.api.ResourceType; import org.apache.hadoop.hive.metastore.api.ResourceUri; -import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; -import org.apache.hadoop.hive.ql.exec.FunctionUtils.FunctionType; import org.apache.hadoop.hive.ql.exec.FunctionUtils.UDFClassType; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -47,10 +44,6 @@ import org.apache.hadoop.hive.ql.plan.FunctionWork; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; -import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; -import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; /** @@ -308,9 +301,10 @@ public static void addFunctionResources(List resources) throws Hive } } - @SuppressWarnings("unchecked") private Class getUdfClass(CreateFunctionDesc desc) throws ClassNotFoundException { - return Class.forName(desc.getClassName(), true, JavaUtils.getClassLoader()); + // get the session specified class loader from SessionState + ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader(); + return Class.forName(desc.getClassName(), true, classLoader); } @Override diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java index dcc19f7..25797c6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java @@ -57,7 +57,7 @@ private FetchFormatter initializeFetcher(Configuration conf) throws Exception { FetchFormatter fetcher; if (formatterName != null && !formatterName.isEmpty()) { Class fetcherClass = Class.forName(formatterName, true, - JavaUtils.getClassLoader()).asSubclass(FetchFormatter.class); + Utilities.getSessionSpecifiedClassLoader()).asSubclass(FetchFormatter.class); fetcher = ReflectionUtils.newInstance(fetcherClass, null); } else { fetcher = new DefaultFetchFormatter(); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java index 76fee61..81c5c88 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java @@ -1909,6 +1909,26 @@ public static String getResourceFiles(Configuration conf, SessionState.ResourceT } /** + * Get the session specified class loader, falling back to the current thread's class loader + * if the session or its conf is unavailable. + * + * @return the class loader to use +
*/ + public static ClassLoader getSessionSpecifiedClassLoader() { + SessionState state = SessionState.get(); + if (state == null || state.getConf() == null) { + LOG.debug("Hive Conf not found or Session not initiated, use thread based class loader instead"); + return JavaUtils.getClassLoader(); + } + ClassLoader sessionCL = state.getConf().getClassLoader(); + if (sessionCL != null) { + LOG.debug("Use session specified class loader"); + return sessionCL; + } + LOG.debug("Session specified class loader not found, use thread based class loader"); + return JavaUtils.getClassLoader(); + } + + /** * Create a URL from a string representing a path to a local file. * The path string can be just a path, or can start with file:/, file:/// * @param onestr path string @@ -1928,6 +1948,33 @@ private static URL urlFromPathString(String onestr) { return oneurl; } + /** + * Get the jar files under the specified directory, or resolve several jar names separated by commas. + * @param path a directory containing jar files, or a comma-separated list of jar paths + * @return the set of jar file paths + */ + public static Set getJarFilesByPath(String path) { + Set result = new HashSet(); + if (path == null || path.isEmpty()) { + return result; + } + + File paths = new File(path); + if (paths.exists() && paths.isDirectory()) { + // add all jar files under the reloadable auxiliary jar paths + Set jarFiles = new HashSet(); + jarFiles.addAll(org.apache.commons.io.FileUtils.listFiles( + paths, new String[]{"jar"}, true)); + for (File f : jarFiles) { + result.add(f.getAbsolutePath()); + } + } else { + String[] files = path.split(","); + Collections.addAll(result, files); + } + return result; + } + /** * Add new elements to the classpath. * diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java index eb2851b..48afaaa 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java @@ -634,7 +634,7 @@ private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException { for (String clientStatsPublisherClass : clientStatsPublisherClasses) { try { clientStatsPublishers.add((ClientStatsPublisher) Class.forName( - clientStatsPublisherClass.trim(), true, JavaUtils.getClassLoader()).newInstance()); + clientStatsPublisherClass.trim(), true, Utilities.getSessionSpecifiedClassLoader()).newInstance()); } catch (Exception e) { LOG.warn(e.getClass().getName() + " occured when trying to create class: " + clientStatsPublisherClass.trim() + " implementing ClientStatsPublisher interface"); diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java index 3f474f8..390ffd9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.exec.Utilities; public class HookUtils { /** @@ -57,7 +58,7 @@ String[] hookClasses = csHooks.split(","); for (String hookClass : hookClasses) { T hook = (T) Class.forName(hookClass.trim(), true, - JavaUtils.getClassLoader()).newInstance(); + Utilities.getSessionSpecifiedClassLoader()).newInstance(); hooks.add(hook); } diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java
index 0962cad..04eff93 100644 --- ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java +++ ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.JobConf; @@ -65,7 +66,7 @@ private void createActualOF() throws IOException { { cls = (Class) Class.forName(actualOutputFormatClass, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } else { throw new RuntimeException("Null pointer detected in actualOutputFormatClass"); } diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java index 9051ba6..c4633f6 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.index.HiveIndexHandler; import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator; import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; @@ -307,7 +308,7 @@ public static HiveStorageHandler getStorageHandler( try { Class handlerClass = (Class) - Class.forName(className, true, JavaUtils.getClassLoader()); + Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); HiveStorageHandler storageHandler = ReflectionUtils.newInstance(handlerClass, conf); return storageHandler; } catch (ClassNotFoundException e) { @@ -329,7 +330,7 @@ public static HiveIndexHandler getIndexHandler(HiveConf conf, try { Class handlerClass = (Class) - Class.forName(indexHandlerClass, true, JavaUtils.getClassLoader()); + Class.forName(indexHandlerClass, true, Utilities.getSessionSpecifiedClassLoader()); HiveIndexHandler indexHandler = ReflectionUtils.newInstance(handlerClass, conf); return indexHandler; } catch (ClassNotFoundException e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java index edec1b7..13277a9 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java @@ -302,7 +302,7 @@ public void setOutputFormatClass(Class outputFormatC } try { inputFormatClass = ((Class) Class.forName(clsName, true, - JavaUtils.getClassLoader())); + Utilities.getSessionSpecifiedClassLoader())); } catch (ClassNotFoundException e) { throw new HiveException("Class not found: " + clsName, e); } @@ -322,7 +322,7 @@ public void setOutputFormatClass(Class outputFormatC } try { Class c = (Class.forName(clsName, true, - JavaUtils.getClassLoader())); + Utilities.getSessionSpecifiedClassLoader())); // Replace FileOutputFormat for backward compatibility if (!HiveOutputFormat.class.isAssignableFrom(c)) { outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c,false); diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java index 2f13ac2..4acafba 100644 --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java +++ 
ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java @@ -48,6 +48,7 @@ import org.apache.hadoop.hive.metastore.api.SkewedInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat; @@ -293,7 +294,7 @@ public HiveStorageHandler getStorageHandler() { inputFormatClass = getStorageHandler().getInputFormatClass(); } else { inputFormatClass = (Class) - Class.forName(className, true, JavaUtils.getClassLoader()); + Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } } catch (ClassNotFoundException e) { throw new RuntimeException(e); @@ -329,7 +330,7 @@ public HiveStorageHandler getStorageHandler() { } else { c = Class.forName(className, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } } if (!HiveOutputFormat.class.isAssignableFrom(c)) { @@ -677,7 +678,7 @@ public void setInputFormatClass(String name) throws HiveException { } try { setInputFormatClass((Class>) Class - .forName(name, true, JavaUtils.getClassLoader())); + .forName(name, true, Utilities.getSessionSpecifiedClassLoader())); } catch (ClassNotFoundException e) { throw new HiveException("Class not found: " + name, e); } @@ -690,7 +691,7 @@ public void setOutputFormatClass(String name) throws HiveException { return; } try { - Class origin = Class.forName(name, true, JavaUtils.getClassLoader()); + Class origin = Class.forName(name, true, Utilities.getSessionSpecifiedClassLoader()); setOutputFormatClass(HiveFileFormatUtils .getOutputFormatSubstitute(origin,false)); } catch (ClassNotFoundException e) { diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java index b15aedc..c84c182 100644 --- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java @@ -29,7 +29,6 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.FileSinkOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; @@ -41,6 +40,7 @@ import org.apache.hadoop.hive.ql.exec.SelectOperator; import org.apache.hadoop.hive.ql.exec.TableScanOperator; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessor; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; @@ -275,7 +275,7 @@ private static boolean isDeterministicUdf(GenericUDF udf) { String udfClassName = bridge.getUdfClassName(); try { UDF udfInternal = - (UDF) Class.forName(bridge.getUdfClassName(), true, JavaUtils.getClassLoader()) + (UDF) Class.forName(bridge.getUdfClassName(), true, Utilities.getSessionSpecifiedClassLoader()) .newInstance(); files = udfInternal.getRequiredFiles(); jars = udf.getRequiredJars(); diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java index d86df45..de4025b 100644 
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java @@ -445,7 +445,7 @@ private static void checkTable(Table table, CreateTableDesc tableDesc) * substitute OutputFormat name based on HiveFileFormatUtils.outputFormatSubstituteMap */ try { - Class origin = Class.forName(importedofc, true, JavaUtils.getClassLoader()); + Class origin = Class.forName(importedofc, true, Utilities.getSessionSpecifiedClassLoader()); Class replaced = HiveFileFormatUtils .getOutputFormatSubstitute(origin,false); if (replaced == null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java index 0a1c660..396553a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.ErrorMsg; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo; @@ -221,7 +222,7 @@ public static String ensureClassExists(String className) return null; } try { - Class.forName(className, true, JavaUtils.getClassLoader()); + Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException("Cannot find class '" + className + "'", e); } diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java index b05d3b4..6e17b64 100644 --- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java +++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java @@ -2531,7 +2531,7 @@ private TableDesc getTableDescFromSerDe(ASTNode child, String cols, try { serdeClass = (Class) Class.forName(serdeName, - true, JavaUtils.getClassLoader()); + true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2720,7 +2720,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) try { serde = (Class) Class.forName(defaultSerdeName, - true, JavaUtils.getClassLoader()); + true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2787,7 +2787,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) try { return (Class) Class.forName(name, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2801,7 +2801,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) try { return (Class) Class.forName(name, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } @@ -2819,7 +2819,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input) try { return (Class) Class.forName(name, true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new SemanticException(e); } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java 
ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java index 17eeae1..1a0cdf8 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.exec.PTFUtils; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; import org.apache.hadoop.util.ReflectionUtils; @@ -93,7 +94,7 @@ public GenericUDAFEvaluator getGenericUDAFEvaluator() { try { return genericUDAFEvaluator = ReflectionUtils.newInstance(Class.forName(genericUDAFEvaluatorClassName, true, - JavaUtils.getClassLoader()).asSubclass(GenericUDAFEvaluator.class), null); + Utilities.getSessionSpecifiedClassLoader()).asSubclass(GenericUDAFEvaluator.class), null); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java index 930acbc..deba198 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java @@ -419,7 +419,7 @@ public void validate(HiveConf conf) if (this.getStorageHandler() == null) { try { Class origin = Class.forName(this.getOutputFormat(), true, - JavaUtils.getClassLoader()); + Utilities.getSessionSpecifiedClassLoader()); Class replaced = HiveFileFormatUtils .getOutputFormatSubstitute(origin,false); if (replaced == null) { diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java index 39f1793..78d4d1f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java +++ ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; import org.apache.hadoop.hive.ql.io.HiveOutputFormat; import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat; @@ -65,7 +66,7 @@ public TableDesc( public Class getDeserializerClass() { try { return (Class) Class.forName( - getSerdeClassName(), true, JavaUtils.getClassLoader()); + getSerdeClassName(), true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java index 0d237f0..727f61f 100644 --- ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java @@ -80,6 +80,8 @@ public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf) return new DeleteResourceProcessor(); case COMPILE: return new CompileProcessor(); + case RELOAD: + return new ReloadProcessor(); default: throw new AssertionError("Unknown HiveCommand " + hiveCommand); } diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java index f5bc427..27d8325 100644 --- ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java +++ ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java @@ -31,6 +31,7 @@ DFS(), ADD(), LIST(), + 
RELOAD(), DELETE(), COMPILE(); private static final Set COMMANDS = new HashSet(); diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java new file mode 100644 index 0000000..eb5352d --- /dev/null +++ ql/src/java/org/apache/hadoop/hive/ql/processors/ReloadProcessor.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.processors; + +import java.io.IOException; + +import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Used to reload the auxiliary jars without restarting HiveServer2. + */ +public class ReloadProcessor implements CommandProcessor { + + @Override + public void init() { + } + + @Override + public CommandProcessorResponse run(String command) throws CommandNeedRetryException { + SessionState ss = SessionState.get(); + try { + ss.reloadAuxJars(); + } catch (IOException e) { + return CommandProcessorResponse.create(e); + } + return new CommandProcessorResponse(0); + } +} diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java index 9798cf3..d2c451c 100644 --- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java +++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java @@ -24,14 +24,8 @@ import java.io.InputStream; import java.io.PrintStream; import java.net.URI; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; +import java.net.URLClassLoader; +import java.util.*; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; @@ -205,6 +199,11 @@ private Path localSessionPath; /** + * Stores the jars loaded by the last reload. + */ + private final Set preReloadableAuxJars = new HashSet(); + + /** * Get the lineage state stored in this session. * * @return LineageState @@ -719,7 +718,6 @@ static void validateFiles(List newFiles) throws IllegalArgumentException SessionState ss = SessionState.get(); Configuration conf = (ss == null) ?
new Configuration() : ss.getConf(); - LogHelper console = getConsole(); for (String newFile : newFiles) { try { if (Utilities.realFile(newFile, conf) == null) { @@ -733,6 +731,52 @@ } } + // reload the jars under the path specified in the hive.reloadable.aux.jars.path property + public void reloadAuxJars() throws IOException { + final Set reloadedAuxJars = new HashSet(); + + final String renewableJarPath = conf.getVar(ConfVars.HIVERELOADABLEJARS); + // do nothing if this property is not specified or empty + if (renewableJarPath == null || renewableJarPath.isEmpty()) { + return; + } + + Set jarPaths = Utilities.getJarFilesByPath(renewableJarPath); + + // load jars under the hive.reloadable.aux.jars.path + if (!jarPaths.isEmpty()) { + reloadedAuxJars.addAll(jarPaths); + } + + // remove the previous renewable jars + try { + if (preReloadableAuxJars != null && !preReloadableAuxJars.isEmpty()) { + Utilities.removeFromClassPath(preReloadableAuxJars.toArray(new String[0])); + } + } catch (Exception e) { + String msg = "Failed to remove the reloadable jars loaded last time."; + throw new IOException(msg, e); + } + + try { + if (reloadedAuxJars != null && !reloadedAuxJars.isEmpty()) { + URLClassLoader currentCLoader = + (URLClassLoader) SessionState.get().getConf().getClassLoader(); + currentCLoader = + (URLClassLoader) Utilities.addToClassPath(currentCLoader, + reloadedAuxJars.toArray(new String[0])); + conf.setClassLoader(currentCLoader); + Thread.currentThread().setContextClassLoader(currentCLoader); + } + preReloadableAuxJars.clear(); + preReloadableAuxJars.addAll(reloadedAuxJars); + } catch (Exception e) { + String msg = + "Failed to add jars from the path specified in the hive.reloadable.aux.jars.path property."; + throw new IOException(msg, e); + } + } + static void registerJars(List newJars) throws IllegalArgumentException { LogHelper console = getConsole(); try { diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java index e247184..b9878a3 100644 --- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java +++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.StatsSetupConst.StatDB; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.util.ReflectionUtils; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS; @@ -87,7 +88,7 @@ private StatsFactory(Configuration conf) { } private boolean initialize(String type) { - ClassLoader classLoader = JavaUtils.getClassLoader(); + ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader(); try { StatDB statDB = type.startsWith("jdbc") ?
StatDB.jdbc : StatDB.valueOf(type); publisherImplementation = (Class) diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java index 959007a..e471285 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDF; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ConversionHelper; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; @@ -128,7 +129,7 @@ public void setOperator(boolean isOperator) { public Class getUdfClass() { try { - return (Class) Class.forName(udfClassName, true, JavaUtils.getClassLoader()); + return (Class) Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } @@ -138,7 +139,7 @@ public void setOperator(boolean isOperator) { public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException { try { - udf = (UDF) Class.forName(udfClassName, true, JavaUtils.getClassLoader()).newInstance(); + udf = (UDF) Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader()).newInstance(); } catch (Exception e) { throw new UDFArgumentException( "Unable to instantiate UDF implementation class " + udfClassName + ": " + e); diff --git ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java index 7fd6c17..97c823c 100644 --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java @@ -20,13 +20,19 @@ import static org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension; +import java.io.File; import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.List; +import java.util.Set; +import junit.framework.Assert; import junit.framework.TestCase; +import org.apache.commons.io.FileUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.conf.HiveConf; @@ -42,6 +48,7 @@ import org.junit.Test; public class TestUtilities extends TestCase { + public static final Log LOG = LogFactory.getLog(TestUtilities.class); public void testGetFileExtension() { JobConf jc = new JobConf(); @@ -120,12 +127,41 @@ public void testFSUmaskReset() throws Exception { private void checkFSUMaskReset(boolean recursiveArg) throws IllegalArgumentException, IOException { final String FS_MASK_VAL = "055"; HiveConf conf = new HiveConf(); - String dir = System.getProperty("test.tmp.dir") + "/testUtilitiesUMaskReset"; + String dir = System.getProperty("test.tmp.dir") + File.separator + "testUtilitiesUMaskReset"; conf.set(FsPermission.UMASK_LABEL, FS_MASK_VAL); Utilities.createDirsWithPermission(conf, new Path(dir), new FsPermission((short) 00777), recursiveArg); assertEquals(conf.get(FsPermission.UMASK_LABEL), FS_MASK_VAL); } - + @Test + public void testGetJarFilesByPath() { + String dir = System.getProperty("java.io.tmpdir") + File.separator + + "testUtilitiesGetJarFilesByPath"; 
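+ // create a scratch directory and fill it with jar files to exercise the directory-based lookup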
+ File f = new File(dir); + if (!f.exists()) { + f.mkdir(); + } + String jarFileName1 = dir + File.separator + "a.jar"; + String jarFileName2 = dir + File.separator + "b.jar"; + File jarFile = new File(jarFileName1); + try { + FileUtils.touch(jarFile); + Set jars = Utilities.getJarFilesByPath(dir); + Assert.assertTrue(jars.contains(jarFile.getAbsolutePath())); + + File jarFile2 = new File(jarFileName2); + FileUtils.touch(jarFile2); + String newPath = "file://" + jarFileName1 + "," + "file://" + jarFileName2; + jars = Utilities.getJarFilesByPath(newPath); + + Assert.assertTrue(jars.contains("file://" + jarFileName1)); + Assert.assertTrue(jars.contains("file://" + jarFileName2)); + } catch (IOException e) { + LOG.error("failed to create test jar files", e); + org.junit.Assert.fail(e.getMessage()); + } finally { + FileUtils.deleteQuietly(f); + } + } } diff --git ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java index ef0052f..3e00f6d 100644 --- ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java +++ ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java @@ -20,18 +20,27 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; +import java.io.File; +import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collection; +import org.apache.commons.io.FileUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; +import org.apache.hive.common.util.HiveTestUtils; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import com.google.common.io.Files; + /** * Test SessionState */ @@ -39,6 +48,16 @@ public class TestSessionState { private final boolean prewarm; + private final static String hiveReloadPath = System.getProperty("java.io.tmpdir") + File.separator + + "reloadablePath"; + private final static String clazzV1FileName = "SessionStateTest.jar.v1"; + private final static String clazzV2FileName = "SessionStateTest.jar.v2"; + private final static String reloadClazzFileName = "reloadingClazz.jar"; + private final static String reloadClazzV2FileName = "reloadingClazz-V2.jar"; + private final static String reloadClazzName = "org.apache.test.RefreshedJarClass"; + private final static String versionMethodName = "version"; + private File reloadFolder; + public static final Log LOG = LogFactory.getLog(TestSessionState.class); public TestSessionState(Boolean mode) { this.prewarm = mode.booleanValue(); @@ -129,4 +148,114 @@ public void testClassLoaderEquality() throws Exception { assertEquals("Other thread loader and current thread loader", otherThread.loader, Thread.currentThread().getContextClassLoader()); } + + class AddClazzRunnable implements Runnable { + SessionState ss; + + public AddClazzRunnable() { + HiveConf conf = new HiveConf(); + HiveConf.setVar(conf, ConfVars.HIVERELOADABLEJARS, hiveReloadPath); + // create the reload folder for jar files if it does not exist + reloadFolder = new File(hiveReloadPath); + if (!reloadFolder.exists()) { + reloadFolder.mkdir(); + } + ss = new SessionState(conf); + } + + public void run() { + SessionState.start(ss); + + try { + ss = SessionState.get(); +
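// the first reload runs before any jar is copied into the folder, so it is effectively a no-op +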
ss.reloadAuxJars(); + File dist = new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzFileName); + Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzV1FileName)), dist); + ss.reloadAuxJars(); + } catch (Exception e) { + LOG.error("failed to copy file to reloading folder", e); + Assert.fail(e.getMessage()); + } + } + } + + class ReloadExistingClazzRunnable implements Runnable { + SessionState ss; + + public ReloadExistingClazzRunnable() { + HiveConf conf = new HiveConf(); + HiveConf.setVar(conf, ConfVars.HIVERELOADABLEJARS, hiveReloadPath); + // create the reload folder for jar files if it does not exist + reloadFolder = new File(hiveReloadPath); + if (!reloadFolder.exists()) { + reloadFolder.mkdir(); + } + ss = new SessionState(conf); + } + + public void run() { + SessionState.start(ss); + + try { + ss = SessionState.get(); + ss.reloadAuxJars(); + + LOG.info("copy jar file 1"); + File dist = new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzFileName); + + Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzV1FileName)), dist); + ss.reloadAuxJars(); + + LOG.info("copy jar file 2"); + FileUtils.deleteQuietly(dist); + dist = new File(reloadFolder.getAbsolutePath() + File.separator + reloadClazzV2FileName); + Files.copy(new File(HiveTestUtils.getFileFromClasspath(clazzV2FileName)), dist); + + ss.reloadAuxJars(); + } catch (Exception e) { + LOG.error("failed to copy file to reloading folder", e); + Assert.fail(e.getMessage()); + } + } + } + + private String getReloadedClazzVersion(ClassLoader cl) throws Exception { + Class addedClazz = Class.forName(reloadClazzName, true, cl); + Method versionMethod = addedClazz.getMethod(versionMethodName); + return (String) versionMethod.invoke(addedClazz.newInstance()); + } + + @Test + public void testReloadAuxJars() { + LOG.info("add new jar test"); + AddClazzRunnable otherSessionThread = new AddClazzRunnable(); + Thread th = new Thread(otherSessionThread); + th.start(); + try { + th.join(); + Assert.assertEquals("version1", getReloadedClazzVersion(th.getContextClassLoader())); + } catch (Exception e) { + LOG.error("test failed with message: ", e); + Assert.fail(e.getMessage()); + } finally { + FileUtils.deleteQuietly(reloadFolder); + } + } + + @Test + public void testReloadExistingAuxJars() { + LOG.info("reload existing jars test"); + ReloadExistingClazzRunnable otherSessionThread = new ReloadExistingClazzRunnable(); + Thread th = new Thread(otherSessionThread); + th.start(); + try { + th.join(); + Assert.assertEquals("version2", getReloadedClazzVersion(otherSessionThread.ss.getConf().getClassLoader())); + } catch (Exception e) { + LOG.error("test failed with message: ", e); + Assert.fail(e.getMessage()); + } finally { + FileUtils.deleteQuietly(reloadFolder); + } + } } diff --git ql/src/test/resources/SessionStateTest.jar.v1 ql/src/test/resources/SessionStateTest.jar.v1 new file mode 100644 index 0000000..47bceb8 Binary files /dev/null and ql/src/test/resources/SessionStateTest.jar.v1 differ diff --git ql/src/test/resources/SessionStateTest.jar.v2 ql/src/test/resources/SessionStateTest.jar.v2 new file mode 100644 index 0000000..df0da41 Binary files /dev/null and ql/src/test/resources/SessionStateTest.jar.v2 differ diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java index bc0a02c..3ce5a99 100644 --- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java +++
service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java @@ -113,6 +113,11 @@ public HiveSessionImpl(TProtocolVersion protocol, String username, String passwo public void initialize(Map sessionConfMap) throws Exception { //process global init file: .hiverc processGlobalInitFile(); + try { + sessionState.reloadAuxJars(); + } catch (IOException e) { + LOG.error("failed to load jars from the reloadable aux jars path", e); + } SessionState.setCurrentSessionState(sessionState); //set conf properties specified by user from client side
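
Usage sketch (reviewer's note, not part of the patch): the snippet below shows how the pieces added above are meant to fit together, namely the new hive.reloadable.aux.jars.path property, SessionState.reloadAuxJars(), and the session class loader that the whitelisted "reload" command refreshes. The jar directory and UDF class name are hypothetical placeholders.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ReloadSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // point the new property at a directory of jars (hypothetical path)
    conf.setVar(HiveConf.ConfVars.HIVERELOADABLEJARS, "/opt/hive/reloadable-jars");
    SessionState ss = SessionState.start(new SessionState(conf));

    // after dropping a new jar into the directory, refresh the session class
    // loader; issuing the whitelisted "reload" command drives this same path
    ss.reloadAuxJars();

    // classes from the reloaded jars resolve through the session class loader
    Class<?> udf = Class.forName("com.example.MyUdf", true, // hypothetical class
        Utilities.getSessionSpecifiedClassLoader());
    System.out.println("loaded " + udf.getName());
  }
}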