diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7f4afd9..578d047 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -572,6 +572,11 @@ HIVEJAR("hive.jar.path", "", ""),
     HIVEAUXJARS("hive.aux.jars.path", "", ""),
 
+    // refreshable jars
+    HIVEREFRESHJARS("hive.refresh.aux.jars.path", "",
+        "Jars that can be renewed by executing the refresh command, and that can then be " +
+        "used as auxiliary classes, e.g. when creating a UDF or a SerDe."),
+
     // hive added files and jars
     HIVEADDEDFILES("hive.added.files.path", "", ""),
     HIVEADDEDJARS("hive.added.jars.path", "", ""),
@@ -1576,7 +1581,7 @@ HIVE_SERVER2_SSL_KEYSTORE_PATH("hive.server2.keystore.path", "", ""),
     HIVE_SERVER2_SSL_KEYSTORE_PASSWORD("hive.server2.keystore.password", "", ""),
 
-    HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,compile",
+    HIVE_SECURITY_COMMAND_WHITELIST("hive.security.command.whitelist", "set,reset,dfs,add,list,delete,refresh,compile",
         "Comma separated list of non-SQL Hive commands users are authorized to execute"),
 
     HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list",
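For orientation, a minimal sketch of how the new property is meant to be used from embedded Java code (the directory path is illustrative; refreshAuxJars() is the SessionState method added later in this patch):

    HiveConf conf = new HiveConf();
    // local directory scanned recursively for *.jar files on every refresh
    conf.setVar(HiveConf.ConfVars.HIVEREFRESHJARS, "/opt/hive/refresh-jars");
    SessionState.start(new SessionState(conf));
    // pick up whatever jars are currently in the directory
    SessionState.get().refreshAuxJars();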
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
index 93a03ad..4fdb5c9 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -425,7 +426,7 @@ public static HiveStorageHandler getStorageHandler(Configuration conf,
     try {
       Class<? extends HiveStorageHandler> handlerClass =
           (Class<? extends HiveStorageHandler>) Class
-              .forName(storageHandler, true, JavaUtils.getClassLoader());
+              .forName(storageHandler, true, Utilities.getSessionSpecifiedClassLoader());
       return (HiveStorageHandler) ReflectionUtils.newInstance(
           handlerClass, conf);
     } catch (ClassNotFoundException e) {
diff --git hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
index f25039d..ccad819 100644
--- hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
+++ hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClient.java
@@ -22,8 +22,8 @@
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.api.PartitionEventType;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hive.hcatalog.common.HCatException;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
@@ -49,7 +49,7 @@ public static HCatClient create(Configuration conf) throws HCatException {
         HCatClientHMSImpl.class.getName());
     try {
       Class<? extends HCatClient> clientClass = Class.forName(className,
-          true, JavaUtils.getClassLoader()).asSubclass(
+          true, Utilities.getSessionSpecifiedClassLoader()).asSubclass(
           HCatClient.class);
       client = (HCatClient) clientClass.newInstance();
     } catch (ClassNotFoundException e) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
index 5924bcf..928be42 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/DefaultFetchFormatter.java
@@ -53,7 +53,7 @@ public void initialize(Configuration hconf, Properties props) throws HiveException {
   private SerDe initializeSerde(Configuration conf, Properties props) throws Exception {
     String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE);
     Class<? extends SerDe> serdeClass = Class.forName(serdeName, true,
-        JavaUtils.getClassLoader()).asSubclass(SerDe.class);
+        Utilities.getSessionSpecifiedClassLoader()).asSubclass(SerDe.class);
     // cast only needed for Hadoop 0.17 compatibility
     SerDe serde = ReflectionUtils.newInstance(serdeClass, null);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 0c6a3d4..dbb8eaf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -39,7 +39,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Function;
@@ -562,7 +561,7 @@ private static FunctionInfo getFunctionInfoFromMetastore(String functionName) {
       return null;
     }
 
-    Class<?> udfClass = Class.forName(func.getClassName(), true, JavaUtils.getClassLoader());
+    Class<?> udfClass = Class.forName(func.getClassName(), true, Utilities.getSessionSpecifiedClassLoader());
     if (registerTemporaryFunction(functionName, udfClass)) {
       ret = mFunctions.get(functionName);
     } else {
@@ -610,7 +609,7 @@ private static void checkFunctionClass(CommonFunctionInfo cfi) throws ClassNotFoundException {
     // Even if we have a reference to the class (which will be the case for GenericUDFs),
     // the classloader may not be able to resolve the class, which would mean reflection-based
     // methods would fail such as for plan deserialization. Make sure this works too.
-    Class.forName(udfClass.getName(), true, JavaUtils.getClassLoader());
+    Class.forName(udfClass.getName(), true, Utilities.getSessionSpecifiedClassLoader());
   }
 
   private static void loadFunctionResourcesIfNecessary(String functionName, CommonFunctionInfo cfi) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
index bd45df1..6c5d605 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
@@ -21,7 +21,6 @@
 import static org.apache.hadoop.util.StringUtils.stringifyException;
 
 import java.io.IOException;
-import java.net.URI;
 import java.util.List;
 
 import org.apache.commons.logging.Log;
@@ -33,10 +32,8 @@
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
-import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.FunctionUtils.FunctionType;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils.UDFClassType;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -47,10 +44,6 @@
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
-import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -308,9 +301,14 @@ public static void addFunctionResources(List<ResourceUri> resources) throws HiveException {
     }
   }
 
-  @SuppressWarnings("unchecked")
   private Class<?> getUdfClass(CreateFunctionDesc desc) throws ClassNotFoundException {
+    // get the session specified class loader from SessionState; the null check
+    // is defensive, since getSessionSpecifiedClassLoader already falls back to
+    // JavaUtils.getClassLoader() itself
+    ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader();
+    if (classLoader == null) {
+      classLoader = JavaUtils.getClassLoader();
+    }
-    return Class.forName(desc.getClassName(), true, JavaUtils.getClassLoader());
+    return Class.forName(desc.getClassName(), true, classLoader);
   }
 
   @Override
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
index dcc19f7..25797c6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
@@ -57,7 +57,7 @@ private FetchFormatter initializeFetcher(Configuration conf) throws Exception {
     FetchFormatter fetcher;
     if (formatterName != null && !formatterName.isEmpty()) {
       Class<? extends FetchFormatter> fetcherClass = Class.forName(formatterName, true,
-          JavaUtils.getClassLoader()).asSubclass(FetchFormatter.class);
+          Utilities.getSessionSpecifiedClassLoader()).asSubclass(FetchFormatter.class);
       fetcher = ReflectionUtils.newInstance(fetcherClass, null);
     } else {
       fetcher = new DefaultFetchFormatter();
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 76fee61..e79dee4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -95,6 +95,7 @@
 import org.apache.hadoop.hive.common.HiveInterruptCallback;
 import org.apache.hadoop.hive.common.HiveInterruptUtils;
 import org.apache.hadoop.hive.common.HiveStatsUtils;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -1909,6 +1910,23 @@ public static String getResourceFiles(Configuration conf, SessionState.ResourceType t) {
   }
 
   /**
+   * Get the session-specified class loader, falling back to the current
+   * class loader when no session (or no session conf) is available.
+   *
+   * @return the session-level class loader if one is set, otherwise the
+   *         class loader returned by {@link JavaUtils#getClassLoader()}
+   */
+  public static ClassLoader getSessionSpecifiedClassLoader() {
+    SessionState state = SessionState.get();
+    if (state == null || state.getConf() == null) {
+      return JavaUtils.getClassLoader();
+    }
+    ClassLoader sessionCL = state.getConf().getClassLoader();
+    if (sessionCL != null) {
+      return sessionCL;
+    }
+    return JavaUtils.getClassLoader();
+  }
+
+  /**
    * Create a URL from a string representing a path to a local file.
    * The path string can be just a path, or can start with file:/, file:///
    * @param onestr  path string
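The helper above is the linchpin of the patch: every Class.forName call site below swaps JavaUtils.getClassLoader() for it. A minimal illustration of the intended fallback behaviour (the UDF class name is hypothetical):

    // with an active SessionState whose conf carries a class loader holding the
    // refreshed jars, resolution goes through the session; without a session it
    // behaves exactly like JavaUtils.getClassLoader()
    ClassLoader loader = Utilities.getSessionSpecifiedClassLoader();
    Class<?> udfClass = Class.forName("com.example.MyUdf", true, loader);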
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index eb2851b..48afaaa 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -634,7 +634,7 @@ private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException {
     for (String clientStatsPublisherClass : clientStatsPublisherClasses) {
       try {
         clientStatsPublishers.add((ClientStatsPublisher) Class.forName(
-            clientStatsPublisherClass.trim(), true, JavaUtils.getClassLoader()).newInstance());
+            clientStatsPublisherClass.trim(), true, Utilities.getSessionSpecifiedClassLoader()).newInstance());
       } catch (Exception e) {
         LOG.warn(e.getClass().getName() + " occured when trying to create class: "
             + clientStatsPublisherClass.trim() + " implementing ClientStatsPublisher interface");
diff --git ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java
index 3f474f8..390ffd9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/hooks/HookUtils.java
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 
 public class HookUtils {
   /**
@@ -57,7 +58,7 @@
     String[] hookClasses = csHooks.split(",");
     for (String hookClass : hookClasses) {
       T hook = (T) Class.forName(hookClass.trim(), true,
-          JavaUtils.getClassLoader()).newInstance();
+          Utilities.getSessionSpecifiedClassLoader()).newInstance();
       hooks.add(hook);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java
index 0962cad..04eff93 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HivePassThroughOutputFormat.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.JobConf;
@@ -65,7 +66,7 @@ private void createActualOF() throws IOException {
     {
       cls = (Class<? extends OutputFormat>) Class.forName(actualOutputFormatClass, true,
-          JavaUtils.getClassLoader());
+          Utilities.getSessionSpecifiedClassLoader());
     } else {
       throw new RuntimeException("Null pointer detected in actualOutputFormatClass");
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
index 9051ba6..809d6ab 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
@@ -307,7 +308,7 @@ public static HiveStorageHandler getStorageHandler(
     try {
       Class<? extends HiveStorageHandler> handlerClass =
           (Class<? extends HiveStorageHandler>)
-          Class.forName(className, true, JavaUtils.getClassLoader());
+          Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
       HiveStorageHandler storageHandler = ReflectionUtils.newInstance(handlerClass, conf);
       return storageHandler;
     } catch (ClassNotFoundException e) {
@@ -329,7 +330,7 @@ public static HiveIndexHandler getIndexHandler(HiveConf conf,
     try {
       Class<? extends HiveIndexHandler> handlerClass =
          (Class<? extends HiveIndexHandler>)
-          Class.forName(indexHandlerClass, true, JavaUtils.getClassLoader());
+          Class.forName(indexHandlerClass, true, Utilities.getSessionSpecifiedClassLoader());
       HiveIndexHandler indexHandler = ReflectionUtils.newInstance(handlerClass, conf);
       return indexHandler;
     } catch (ClassNotFoundException e) {
@@ -377,7 +378,7 @@ public static HiveAuthorizationProvider getAuthorizeProviderManager(
     if (authzClassName == null || authzClassName.trim().equals("")) {
       cls = DefaultHiveAuthorizationProvider.class;
     } else {
-      Class<?> configClass = Class.forName(authzClassName, true, JavaUtils.getClassLoader());
+      Class<?> configClass = Class.forName(authzClassName, true, Utilities.getSessionSpecifiedClassLoader());
       if (nullIfOtherClass && !HiveAuthorizationProvider.class.isAssignableFrom(configClass)) {
         return null;
       }
@@ -432,7 +433,7 @@ public static HiveAuthenticationProvider getAuthenticator(
       cls = HadoopDefaultAuthenticator.class;
     } else {
       cls = (Class<? extends HiveAuthenticationProvider>) Class.forName(
-          clsStr, true, JavaUtils.getClassLoader());
+          clsStr, true, Utilities.getSessionSpecifiedClassLoader());
     }
     if (cls != null) {
       ret = ReflectionUtils.newInstance(cls, conf);
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
index edec1b7..13277a9 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -302,7 +302,7 @@
     }
     try {
       inputFormatClass = ((Class<? extends InputFormat>) Class.forName(clsName, true,
-          JavaUtils.getClassLoader()));
+          Utilities.getSessionSpecifiedClassLoader()));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + clsName, e);
     }
@@ -322,7 +322,7 @@
     }
     try {
       Class<?> c = (Class.forName(clsName, true,
-          JavaUtils.getClassLoader()));
+          Utilities.getSessionSpecifiedClassLoader()));
       // Replace FileOutputFormat for backward compatibility
       if (!HiveOutputFormat.class.isAssignableFrom(c)) {
         outputFormatClass = HiveFileFormatUtils.getOutputFormatSubstitute(c, false);
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 2f13ac2..4acafba 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -48,6 +48,7 @@
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
@@ -293,7 +294,7 @@ public HiveStorageHandler getStorageHandler() {
         inputFormatClass = getStorageHandler().getInputFormatClass();
       } else {
         inputFormatClass = (Class<? extends InputFormat>)
-            Class.forName(className, true, JavaUtils.getClassLoader());
+            Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
       }
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
@@ -329,7 +330,7 @@
       } else {
         c = Class.forName(className, true,
-            JavaUtils.getClassLoader());
+            Utilities.getSessionSpecifiedClassLoader());
       }
     }
     if (!HiveOutputFormat.class.isAssignableFrom(c)) {
@@ -677,7 +678,7 @@ public void setInputFormatClass(String name) throws HiveException {
     }
     try {
       setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>) Class
-          .forName(name, true, JavaUtils.getClassLoader()));
+          .forName(name, true, Utilities.getSessionSpecifiedClassLoader()));
     } catch (ClassNotFoundException e) {
       throw new HiveException("Class not found: " + name, e);
     }
@@ -690,7 +691,7 @@ public void setOutputFormatClass(String name) throws HiveException {
       return;
     }
     try {
-      Class<?> origin = Class.forName(name, true, JavaUtils.getClassLoader());
+      Class<?> origin = Class.forName(name, true, Utilities.getSessionSpecifiedClassLoader());
       setOutputFormatClass(HiveFileFormatUtils
           .getOutputFormatSubstitute(origin, false));
     } catch (ClassNotFoundException e) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
index b15aedc..d3ed3ed 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
@@ -30,17 +30,18 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.SelectOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
@@ -275,7 +276,7 @@ private static boolean isDeterministicUdf(GenericUDF udf) {
     String udfClassName = bridge.getUdfClassName();
     try {
       UDF udfInternal =
-          (UDF) Class.forName(bridge.getUdfClassName(), true, JavaUtils.getClassLoader())
+          (UDF) Class.forName(bridge.getUdfClassName(), true, Utilities.getSessionSpecifiedClassLoader())
               .newInstance();
       files = udfInternal.getRequiredFiles();
       jars = udf.getRequiredJars();
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index d86df45..de4025b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -445,7 +445,7 @@ private static void checkTable(Table table, CreateTableDesc tableDesc)
      * substitute OutputFormat name based on HiveFileFormatUtils.outputFormatSubstituteMap
      */
     try {
-      Class<?> origin = Class.forName(importedofc, true, JavaUtils.getClassLoader());
+      Class<?> origin = Class.forName(importedofc, true, Utilities.getSessionSpecifiedClassLoader());
       Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils
           .getOutputFormatSubstitute(origin, false);
       if (replaced == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
index 0a1c660..396553a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
@@ -221,7 +222,7 @@ public static String ensureClassExists(String className)
       return null;
     }
     try {
-      Class.forName(className, true, JavaUtils.getClassLoader());
+      Class.forName(className, true, Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new SemanticException("Cannot find class '" + className + "'", e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index b05d3b4..6e17b64 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2531,7 +2531,7 @@ private TableDesc getTableDescFromSerDe(ASTNode child, String cols,
     try {
       serdeClass = (Class<? extends Deserializer>) Class.forName(serdeName,
-          true, JavaUtils.getClassLoader());
+          true, Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e);
     }
@@ -2720,7 +2720,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input)
     try {
       serde = (Class<? extends Deserializer>) Class.forName(defaultSerdeName,
-          true, JavaUtils.getClassLoader());
+          true, Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e);
     }
@@ -2787,7 +2787,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input)
     try {
       return (Class<? extends RecordReader>) Class.forName(name, true,
-          JavaUtils.getClassLoader());
+          Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e);
     }
@@ -2801,7 +2801,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input)
     try {
       return (Class<? extends RecordReader>) Class.forName(name, true,
-          JavaUtils.getClassLoader());
+          Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e);
     }
@@ -2819,7 +2819,7 @@ private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input)
     try {
       return (Class<? extends RecordWriter>) Class.forName(name, true,
-          JavaUtils.getClassLoader());
+          Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
index 17eeae1..1a0cdf8 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.PTFUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -93,7 +94,7 @@ public GenericUDAFEvaluator getGenericUDAFEvaluator() {
     try {
       return genericUDAFEvaluator =
           ReflectionUtils.newInstance(Class.forName(genericUDAFEvaluatorClassName, true,
-              JavaUtils.getClassLoader()).asSubclass(GenericUDAFEvaluator.class), null);
+              Utilities.getSessionSpecifiedClassLoader()).asSubclass(GenericUDAFEvaluator.class), null);
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
index 930acbc..deba198 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
@@ -419,7 +419,7 @@ public void validate(HiveConf conf)
     if (this.getStorageHandler() == null) {
       try {
         Class<?> origin = Class.forName(this.getOutputFormat(), true,
-            JavaUtils.getClassLoader());
+            Utilities.getSessionSpecifiedClassLoader());
         Class<? extends HiveOutputFormat> replaced = HiveFileFormatUtils
             .getOutputFormatSubstitute(origin, false);
         if (replaced == null) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index 39f1793..78d4d1f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
 import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
@@ -65,7 +66,7 @@ public TableDesc(
   public Class<? extends Deserializer> getDeserializerClass() {
     try {
       return (Class<? extends Deserializer>) Class.forName(
-          getSerdeClassName(), true, JavaUtils.getClassLoader());
+          getSerdeClassName(), true, Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
index 0d237f0..e1b4812 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorFactory.java
@@ -80,6 +80,8 @@ public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf)
       return new DeleteResourceProcessor();
     case COMPILE:
       return new CompileProcessor();
+    case REFRESH:
+      return new RefreshProcessor();
     default:
       throw new AssertionError("Unknown HiveCommand " + hiveCommand);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
index f5bc427..b3bb408 100644
--- ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/HiveCommand.java
@@ -31,6 +31,7 @@
   DFS(),
   ADD(),
   LIST(),
+  REFRESH(),
   DELETE(),
   COMPILE();
   private static final Set<String> COMMANDS = new HashSet<String>();
diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/RefreshProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/RefreshProcessor.java
new file mode 100644
index 0000000..6a18282
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/processors/RefreshProcessor.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.processors;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Used to refresh the auxiliary jars of a running session without
+ * restarting HiveServer2.
+ */
+public class RefreshProcessor implements CommandProcessor {
+
+  @Override
+  public void init() {
+  }
+
+  @Override
+  public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
+    SessionState ss = SessionState.get();
+    try {
+      ss.refreshAuxJars();
+    } catch (IOException e) {
+      return CommandProcessorResponse.create(e);
+    }
+    return new CommandProcessorResponse(0);
+  }
+}
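A hedged dispatch sketch (the error handling is illustrative): the command string reaches the new processor through CommandProcessorFactory, which resolves refresh because HiveCommand now declares it and the whitelist permits it:

    CommandProcessor proc =
        CommandProcessorFactory.getForHiveCommand(new String[] {"refresh"}, conf);
    CommandProcessorResponse resp = proc.run("");
    if (resp.getResponseCode() != 0) {
      // refreshAuxJars() threw; the cause is wrapped in the response
      System.err.println(resp.getErrorMessage());
    }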
diff --git ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 9798cf3..047913d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -24,6 +24,8 @@
+import java.io.File;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.net.URI;
+import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -719,7 +721,6 @@ static void validateFiles(List<String> newFiles) throws IllegalArgumentException {
     SessionState ss = SessionState.get();
     Configuration conf = (ss == null) ? new Configuration() : ss.getConf();
-    LogHelper console = getConsole();
 
     for (String newFile : newFiles) {
       try {
         if (Utilities.realFile(newFile, conf) == null) {
@@ -733,6 +735,58 @@
       }
     }
   }
 
+  // the jars loaded by the previous refresh
+  private Set<String> preRefreshableAuxJars = new HashSet<String>();
+
+  // reload the jars found under the path given by the
+  // hive.refresh.aux.jars.path property
+  public void refreshAuxJars() throws IOException {
+    String refreshJarPath = conf.getVar(ConfVars.HIVEREFRESHJARS);
+    // do nothing if the property is not specified or empty
+    if (refreshJarPath == null || refreshJarPath.isEmpty()) {
+      return;
+    }
+
+    File refreshJarDir = new File(refreshJarPath);
+    if (!refreshJarDir.exists()) {
+      // no need to refresh
+      return;
+    }
+    Set<File> jarFilesToAdd = new HashSet<File>();
+    jarFilesToAdd.addAll(org.apache.commons.io.FileUtils.listFiles(
+        refreshJarDir, new String[] { "jar" }, true));
+
+    // the absolute paths of the jars currently in the directory
+    Set<String> refreshedAuxJars = new HashSet<String>();
+    for (File f : jarFilesToAdd) {
+      refreshedAuxJars.add(f.getAbsolutePath());
+    }
+
+    // remove the refreshable jars loaded by the previous refresh
+    try {
+      if (!preRefreshableAuxJars.isEmpty()) {
+        Utilities.removeFromClassPath(preRefreshableAuxJars.toArray(new String[0]));
+      }
+    } catch (Exception e) {
+      throw new IOException("Failed to remove the refreshable jars loaded last time.", e);
+    }
+
+    // add the current set of jars and install the new class loader on both
+    // the session conf and the current thread
+    try {
+      if (!refreshedAuxJars.isEmpty()) {
+        URLClassLoader currentCLoader =
+            (URLClassLoader) SessionState.get().getConf().getClassLoader();
+        currentCLoader = (URLClassLoader) Utilities.addToClassPath(currentCLoader,
+            refreshedAuxJars.toArray(new String[0]));
+        conf.setClassLoader(currentCLoader);
+        Thread.currentThread().setContextClassLoader(currentCLoader);
+      }
+      preRefreshableAuxJars = refreshedAuxJars;
+    } catch (Exception e) {
+      throw new IOException(
+          "Failed to add jars from the path specified in hive.refresh.aux.jars.path.", e);
+    }
+  }
+
   static void registerJars(List<String> newJars) throws IllegalArgumentException {
     LogHelper console = getConsole();
     try {
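The net effect, sketched below (the class name is the one used by the tests that follow): after a refresh, the session conf's class loader, which is also installed as the thread context class loader, serves the current contents of the directory. Jars from the previous refresh are removed first, so deleted or replaced jars stop resolving:

    SessionState.get().refreshAuxJars();
    ClassLoader cl = SessionState.get().getConf().getClassLoader();
    Class<?> reloaded = Class.forName("org.apache.test.RefreshedJarClass", true, cl);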
diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java
index e247184..b9878a3 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst.StatDB;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
@@ -87,7 +88,7 @@ private StatsFactory(Configuration conf) {
   }
 
   private boolean initialize(String type) {
-    ClassLoader classLoader = JavaUtils.getClassLoader();
+    ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader();
     try {
       StatDB statDB = type.startsWith("jdbc") ? StatDB.jdbc : StatDB.valueOf(type);
       publisherImplementation = (Class<? extends Serializable>)
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
index 959007a..e471285 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBridge.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ConversionHelper;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -128,7 +129,7 @@ public void setOperator(boolean isOperator) {
 
   public Class<? extends UDF> getUdfClass() {
     try {
-      return (Class<? extends UDF>) Class.forName(udfClassName, true, JavaUtils.getClassLoader());
+      return (Class<? extends UDF>) Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader());
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
     }
@@ -138,7 +139,7 @@
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
     try {
-      udf = (UDF) Class.forName(udfClassName, true, JavaUtils.getClassLoader()).newInstance();
+      udf = (UDF) Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader()).newInstance();
     } catch (Exception e) {
       throw new UDFArgumentException(
           "Unable to instantiate UDF implementation class " + udfClassName + ": " + e);
diff --git ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
index ef0052f..ecffdf8 100644
--- ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
+++ ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
@@ -20,12 +20,17 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 
+import java.io.File;
+import java.lang.reflect.Method;
 import java.util.Arrays;
 import java.util.Collection;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hive.common.util.HiveTestUtils;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -39,6 +44,13 @@ public class TestSessionState {
 
   private final boolean prewarm;
 
+  private final static String hive_refresh_path = "./tmpfolder/";
+  private final static String clazz_v1_file_name = "SessionStateTest-V1.jar";
+  private final static String clazz_v2_file_name = "SessionStateTest-V2.jar";
+  private final static String refresh_clazz_file_name = "refresh_clazz.jar";
+  private final static String refresh_clazz_name = "org.apache.test.RefreshedJarClass";
+  private final static String version_method_name = "version";
+  private File refresh_folder;
 
   public TestSessionState(Boolean mode) {
     this.prewarm = mode.booleanValue();
@@ -129,4 +141,105 @@ public void testClassLoaderEquality() throws Exception {
     assertEquals("Other thread loader and current thread loader",
         otherThread.loader, Thread.currentThread().getContextClassLoader());
   }
+
+  class RefreshJarRunnable implements Runnable {
+    SessionState ss;
+
+    public RefreshJarRunnable(SessionState ss) {
+      this.ss = ss;
+    }
+
+    public void run() {
+      SessionState.start(ss);
+      try {
+        FileUtils.copyFile(
+            new File(HiveTestUtils.getFileFromClasspath(clazz_v1_file_name)),
+            new File(refresh_folder.getAbsolutePath() + File.separator
+                + refresh_clazz_file_name));
+        SessionState.get().refreshAuxJars();
+      } catch (Exception e) {
+        Assert.fail(e.getMessage());
+      }
+    }
+  }
+
+  class RefreshExistingJarRunnable implements Runnable {
+    SessionState ss;
+
+    public RefreshExistingJarRunnable(SessionState ss) {
+      this.ss = ss;
+    }
+
+    public void run() {
+      SessionState.start(ss);
+      try {
+        FileUtils.copyFile(
+            new File(HiveTestUtils.getFileFromClasspath(clazz_v1_file_name)),
+            new File(refresh_folder.getAbsolutePath() + File.separator
+                + refresh_clazz_file_name));
+        SessionState.get().refreshAuxJars();
+
+        // overwrite the jar with V2 and refresh again; the class must be reloaded
+        FileUtils.copyFile(
+            new File(HiveTestUtils.getFileFromClasspath(clazz_v2_file_name)),
+            new File(refresh_folder.getAbsolutePath() + File.separator
+                + refresh_clazz_file_name));
+        SessionState.get().refreshAuxJars();
+      } catch (Exception e) {
+        Assert.fail(e.getMessage());
+      }
+    }
+  }
+
+  private String getRefreshClazzVersion(ClassLoader cl) throws Exception {
+    Class<?> addedClazz = Class.forName(refresh_clazz_name, true, cl);
+    Method versionMethod = addedClazz.getMethod(version_method_name);
+    return (String) versionMethod.invoke(addedClazz.newInstance());
+  }
+
+  @Test
+  public void testRefreshAuxJars() throws Exception {
+    HiveConf conf = new HiveConf();
+    HiveConf.setVar(conf, ConfVars.HIVEREFRESHJARS, hive_refresh_path);
+    // create the folder holding the refreshable jars if it does not exist
+    refresh_folder = new File(hive_refresh_path);
+    if (!refresh_folder.exists()) {
+      refresh_folder.mkdir();
+    }
+    SessionState ss = new SessionState(conf);
+    Thread th = new Thread(new RefreshJarRunnable(ss));
+    th.start();
+    try {
+      th.join();
+      Assert.assertEquals("version1",
+          getRefreshClazzVersion(SessionState.get().getConf().getClassLoader()));
+    } finally {
+      FileUtils.deleteQuietly(refresh_folder);
+    }
+  }
+
+  @Test
+  public void testRefreshExistingAuxJars() throws Exception {
+    HiveConf conf = new HiveConf();
+    HiveConf.setVar(conf, ConfVars.HIVEREFRESHJARS, hive_refresh_path);
+    // create the folder holding the refreshable jars if it does not exist
+    refresh_folder = new File(hive_refresh_path);
+    if (!refresh_folder.exists()) {
+      refresh_folder.mkdir();
+    }
+    SessionState ss = new SessionState(conf);
+    Thread th = new Thread(new RefreshExistingJarRunnable(ss));
+    th.start();
+    try {
+      th.join();
+      Assert.assertEquals("version2",
+          getRefreshClazzVersion(SessionState.get().getConf().getClassLoader()));
+    } finally {
+      FileUtils.deleteQuietly(refresh_folder);
+    }
+  }
 }
diff --git ql/src/test/resources/SessionStateTest-V1.jar ql/src/test/resources/SessionStateTest-V1.jar
new file mode 100644
index 0000000..47bceb8
Binary files /dev/null and ql/src/test/resources/SessionStateTest-V1.jar differ
diff --git ql/src/test/resources/SessionStateTest-V2.jar ql/src/test/resources/SessionStateTest-V2.jar
new file mode 100644
index 0000000..df0da41
Binary files /dev/null and ql/src/test/resources/SessionStateTest-V2.jar differ
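For reference, the class packaged in the two test jars presumably looks like the sketch below; this is inferred from the test's reflection calls, and only the returned string differs between the two jars:

    package org.apache.test;

    public class RefreshedJarClass {
      public String version() {
        return "version1"; // the copy in SessionStateTest-V2.jar returns "version2"
      }
    }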
diff --git service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index bc0a02c..29665f4 100644
--- service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -113,6 +113,11 @@ public HiveSessionImpl(TProtocolVersion protocol, String username, String password,
   public void initialize(Map<String, String> sessionConfMap) throws Exception {
     //process global init file: .hiverc
     processGlobalInitFile();
+    try {
+      sessionState.refreshAuxJars();
+    } catch (IOException e) {
+      // best effort: a failed refresh should not prevent the session from starting
+    }
     SessionState.setCurrentSessionState(sessionState);
 
     //set conf properties specified by user from client side