diff --git accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
index 16abac2..dc4782a 100644
--- accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
+++ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/Utils.java
@@ -42,6 +42,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.Logger;
@@ -207,7 +208,7 @@ private static String getJar(Class my_class) {
     Class jarFinder = null;
     try {
       log.debug("Looking for " + hadoopJarFinder + ".");
-      jarFinder = Class.forName(hadoopJarFinder);
+      jarFinder = JavaUtils.loadClass(hadoopJarFinder);
       log.debug(hadoopJarFinder + " found.");
       Method getJar = jarFinder.getMethod("getJar", Class.class);
       ret = (String) getJar.invoke(null, my_class);
diff --git accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java
index ef459aa..4b5fae6 100644
--- accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java
+++ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/PrimitiveComparisonFilter.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloColumnMapping;
 import org.apache.hadoop.hive.accumulo.predicate.compare.CompareOp;
 import org.apache.hadoop.hive.accumulo.predicate.compare.PrimitiveComparison;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
@@ -117,8 +118,8 @@ public void init(SortedKeyValueIterator source, Map options, IteratorEnvironment env)
     cqHolder = new Text();
     try {
-      Class pClass = Class.forName(options.get(P_COMPARE_CLASS));
-      Class cClazz = Class.forName(options.get(COMPARE_OPT_CLASS));
+      Class pClass = JavaUtils.loadClass(options.get(P_COMPARE_CLASS));
+      Class cClazz = JavaUtils.loadClass(options.get(COMPARE_OPT_CLASS));
       PrimitiveComparison pCompare = pClass.asSubclass(PrimitiveComparison.class).newInstance();
       compOpt = cClazz.asSubclass(CompareOp.class).newInstance();
       byte[] constant = getConstant(options);
diff --git accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java
index ef77697..aab2317 100644
--- accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java
+++ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDeParameters.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.accumulo.columns.ColumnMapper;
 import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -117,7 +118,7 @@ protected AccumuloRowIdFactory createRowIdFactory(Configuration job, Properties tbl)
     String factoryClassName = tbl.getProperty(COMPOSITE_ROWID_FACTORY);
     if (factoryClassName != null) {
       log.info("Loading CompositeRowIdFactory class " + factoryClassName);
-      Class factoryClazz = Class.forName(factoryClassName);
+      Class factoryClazz = JavaUtils.loadClass(factoryClassName);
       return (AccumuloRowIdFactory) ReflectionUtils.newInstance(factoryClazz, job);
     }
@@ -125,7 +126,7 @@ protected AccumuloRowIdFactory createRowIdFactory(Configuration job, Properties tbl)
     String keyClassName = tbl.getProperty(COMPOSITE_ROWID_CLASS);
     if (keyClassName != null) {
       log.info("Loading CompositeRowId class " + keyClassName);
-      Class keyClass = Class.forName(keyClassName);
+      Class keyClass = JavaUtils.loadClass(keyClassName);
       Class compositeRowIdClass = keyClass
           .asSubclass(AccumuloCompositeRowId.class);
       return new CompositeAccumuloRowIdFactory(compositeRowIdClass);
diff --git common/src/java/org/apache/hadoop/hive/common/JavaUtils.java common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 9aa917c..e84de9a 100644
--- common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -70,6 +70,14 @@ public static ClassLoader getClassLoader() {
     return classLoader;
   }
 
+  public static Class<?> loadClass(String className) throws ClassNotFoundException {
+    return loadClass(className, true);
+  }
+
+  public static Class<?> loadClass(String className, boolean init) throws ClassNotFoundException {
+    return Class.forName(className, init, getClassLoader());
+  }
+
   public static void closeClassLoadersTo(ClassLoader current, ClassLoader stop) {
     if (!isValidHierarchy(current, stop)) {
       return;
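The hunk above is the crux of the patch: every call site that follows swaps a bare Class.forName(name), which resolves only against the classloader that loaded the calling class, for a lookup through JavaUtils.getClassLoader(), which prefers the thread's context classloader — in Hive, the session classloader that also holds jars registered at runtime via ADD JAR. A minimal standalone sketch of the difference follows; the jar path and com.example.DynamicClass are hypothetical stand-ins for illustration, and the demo assumes /tmp/udfs.jar actually contains that class:

    import java.net.URL;
    import java.net.URLClassLoader;

    public class LoadClassDemo {
      public static void main(String[] args) throws Exception {
        // A jar added at runtime (e.g. via Hive's ADD JAR) lands in a child
        // classloader, not in the loader that loaded this class.
        URLClassLoader sessionLoader = new URLClassLoader(
            new URL[] { new URL("file:///tmp/udfs.jar") },  // hypothetical jar
            LoadClassDemo.class.getClassLoader());
        Thread.currentThread().setContextClassLoader(sessionLoader);

        String name = "com.example.DynamicClass";  // hypothetical class in udfs.jar

        try {
          // Resolves against the caller's own classloader: cannot see udfs.jar.
          Class.forName(name);
        } catch (ClassNotFoundException expected) {
          System.out.println("Class.forName missed it: " + expected.getMessage());
        }

        // What JavaUtils.loadClass(name) effectively does: resolve against the
        // context classloader, so classes from udfs.jar are found.
        Class<?> found = Class.forName(name, true,
            Thread.currentThread().getContextClassLoader());
        System.out.println("Context-loader lookup found: " + found.getName());
      }
    }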
diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
index 9f2f02f..3bcc5c0 100644
--- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeHelper.java
@@ -36,6 +36,7 @@
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -466,7 +467,7 @@ private static void generateAvroStructFromClass(String serClassName, StringBuilder sb)
     throws SerDeException {
     Class serClass;
     try {
-      serClass = Class.forName(serClassName);
+      serClass = JavaUtils.loadClass(serClassName);
     } catch (ClassNotFoundException e) {
       throw new SerDeException("Error obtaining descriptor for " + serClassName, e);
     }
@@ -562,7 +563,7 @@ private static String filter(String name) {
     Class keyClass;
     try {
-      keyClass = Class.forName(compKeyClassName);
+      keyClass = JavaUtils.loadClass(compKeyClassName);
       keyFactory = new CompositeHBaseKeyFactory(keyClass);
     } catch (Exception e) {
       throw new SerDeException(e);
diff --git hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
index a43520c..9dc25a2 100644
--- hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
+++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDeParameters.java
@@ -25,6 +25,7 @@
 import org.apache.avro.Schema;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.hbase.ColumnMappings.ColumnMapping;
 import org.apache.hadoop.hive.hbase.struct.AvroHBaseValueFactory;
 import org.apache.hadoop.hive.hbase.struct.DefaultHBaseValueFactory;
@@ -201,7 +202,7 @@ private static HBaseKeyFactory createKeyFactory(Configuration job, Properties tbl)
     if (configuration != null) {
       return configuration.getClassByName(className);
     }
-    return Class.forName(className);
+    return JavaUtils.loadClass(className);
   }
 
   private List initValueFactories(Configuration conf, Properties tbl)
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
index bfa8657..5a95467 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
@@ -56,9 +57,9 @@ private Class serDeClass;
 
   public FosterStorageHandler(String ifName, String ofName, String serdeName)
       throws ClassNotFoundException {
-    this((Class) Class.forName(ifName),
-        (Class) Class.forName(ofName),
-        (Class) Class.forName(serdeName));
+    this((Class) JavaUtils.loadClass(ifName),
+        (Class) JavaUtils.loadClass(ofName),
+        (Class) JavaUtils.loadClass(serdeName));
   }
 
   public FosterStorageHandler(Class ifClass,
diff --git hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java
index d3d5a0f..bcedb3a 100644
--- hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java
+++ hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatSplit.java
@@ -23,6 +23,7 @@
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
@@ -142,7 +143,7 @@ public void readFields(DataInput input) throws IOException {
     org.apache.hadoop.mapred.InputSplit split;
     try {
       Class splitClass =
-          (Class) Class.forName(baseSplitClassName);
+          (Class) JavaUtils.loadClass(baseSplitClassName);
 
       //Class.forName().newInstance() does not work if the underlying
       //InputSplit has package visibility
diff --git hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
index d9641e7..1090d68 100644
--- hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
+++ hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/messaging/MessageFactory.java
@@ -19,6 +19,7 @@
 
 package org.apache.hive.hcatalog.messaging;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -66,7 +67,7 @@ public static MessageFactory getInstance() {
   private static MessageFactory getInstance(String className) {
     try {
-      return (MessageFactory)ReflectionUtils.newInstance(Class.forName(className), hiveConf);
+      return (MessageFactory)ReflectionUtils.newInstance(JavaUtils.loadClass(className), hiveConf);
     } catch (ClassNotFoundException classNotFound) {
       throw new IllegalStateException("Could not construct MessageFactory implementation: ", classNotFound);
diff --git hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index 8c4bca0..1c85ab5 100644
--- hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -22,6 +22,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -70,7 +71,7 @@ protected AbstractRecordWriter(HiveEndPoint endPoint, HiveConf conf)
             + endPoint);
       }
       String outFormatName = this.tbl.getSd().getOutputFormat();
-      outf = (AcidOutputFormat) ReflectionUtils.newInstance(Class.forName(outFormatName), conf);
+      outf = (AcidOutputFormat) ReflectionUtils.newInstance(JavaUtils.loadClass(outFormatName), conf);
     } catch (MetaException e) {
       throw new ConnectionError(endPoint, e);
     } catch (NoSuchObjectException e) {
diff --git hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
index 36b64da..d0e7ac6 100644
--- hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
+++ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
@@ -26,6 +26,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hive.hcatalog.templeton.JsonBuilder;
 
 /**
@@ -70,7 +71,7 @@ public static TempletonStorage getStorageInstance(Configuration conf) {
     TempletonStorage storage = null;
     try {
       storage = (TempletonStorage)
-        Class.forName(conf.get(TempletonStorage.STORAGE_CLASS))
+        JavaUtils.loadClass(conf.get(TempletonStorage.STORAGE_CLASS))
           .newInstance();
     } catch (Exception e) {
       LOG.warn("No storage method found: " + e.getMessage());
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 3a2a6ee..3309bb5 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -40,6 +40,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.CompressionUtils;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -239,8 +240,8 @@ public int execute(DriverContext driverContext) {
     job.setMapOutputValueClass(BytesWritable.class);
 
     try {
-      job.setPartitionerClass((Class) (Class.forName(HiveConf.getVar(job,
-          HiveConf.ConfVars.HIVEPARTITIONER))));
+      String partitioner = HiveConf.getVar(job, ConfVars.HIVEPARTITIONER);
+      job.setPartitionerClass((Class) JavaUtils.loadClass(partitioner));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e.getMessage(), e);
     }
@@ -286,7 +287,7 @@ public int execute(DriverContext driverContext) {
     LOG.info("Using " + inpFormat);
 
     try {
-      job.setInputFormat((Class) (Class.forName(inpFormat)));
+      job.setInputFormat((Class) JavaUtils.loadClass(inpFormat));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e.getMessage(), e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
index adb50f0..92625f2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -194,7 +195,7 @@ private MapJoinPersistableTableContainer create(
     try {
       @SuppressWarnings("unchecked")
       Class clazz =
-          (Class)Class.forName(name);
+          (Class) JavaUtils.loadClass(name);
       Constructor constructor = clazz.getDeclaredConstructor(Map.class);
       return constructor.newInstance(metaData);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
index 4be35aa..9ab94c2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkPlanGenerator.java
@@ -27,6 +27,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.io.merge.MergeFileMapper;
 import org.apache.hadoop.hive.ql.io.merge.MergeFileOutputFormat;
 import org.apache.hadoop.hive.ql.io.merge.MergeFileWork;
@@ -158,7 +159,7 @@ private SparkTran generateParentTran(SparkPlan sparkPlan, SparkWork sparkWork,
     Class inputFormatClass;
     try {
-      inputFormatClass = Class.forName(inpFormat);
+      inputFormatClass = JavaUtils.loadClass(inpFormat);
     } catch (ClassNotFoundException e) {
       String message = "Failed to load specified input format class:"
           + inpFormat;
@@ -226,7 +227,7 @@ private JobConf cloneJobConf(BaseWork work) throws Exception {
     HiveConf.setVar(cloned, HiveConf.ConfVars.PLAN, "");
     try {
       cloned.setPartitionerClass((Class)
-          (Class.forName(HiveConf.getVar(cloned, HiveConf.ConfVars.HIVEPARTITIONER))));
+          JavaUtils.loadClass(HiveConf.getVar(cloned, HiveConf.ConfVars.HIVEPARTITIONER)));
     } catch (ClassNotFoundException e) {
       String msg = "Could not find partitioner class: " + e.getMessage()
           + " which is specified by: " + HiveConf.ConfVars.HIVEPARTITIONER.varname;
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
index ce3b1d6..52c36eb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HivePreWarmProcessor.java
@@ -22,6 +22,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.tez.common.TezUtils;
@@ -99,7 +100,7 @@ public void run(Map inputs,
          * in hive-exec.jar. These are the relatively safe ones - operators & io classes.
          */
         if(klass.indexOf("vector") != -1 || klass.indexOf("Operator") != -1) {
-          Class.forName(klass);
+          JavaUtils.loadClass(klass);
         }
       }
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
index afe83d9..a6f4b55 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
@@ -28,6 +28,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.plan.MapWork;
@@ -114,7 +115,7 @@ public HiveSplitGenerator() {
     if (groupingEnabled) {
       // Need to instantiate the realInputFormat
       InputFormat inputFormat =
-          (InputFormat) ReflectionUtils.newInstance(Class.forName(realInputFormatName),
+          (InputFormat) ReflectionUtils.newInstance(JavaUtils.loadClass(realInputFormatName),
               jobConf);
 
       int totalResource = rootInputContext.getTotalAvailableResource().getMemory();
diff --git ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
index 0ca5d22..1dbe230 100644
--- ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
+++ ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
@@ -28,7 +28,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.StatsSetupConst;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -220,7 +220,7 @@ public void generateIndexQuery(List indexes, ExprNodeDesc predicate,
     // We can only perform a binary search with HiveInputFormat and CombineHiveInputFormat
     // and BucketizedHiveInputFormat
     try {
-      if (!HiveInputFormat.class.isAssignableFrom(Class.forName(inputFormat))) {
+      if (!HiveInputFormat.class.isAssignableFrom(JavaUtils.loadClass(inputFormat))) {
         work = null;
         break;
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
index ede3b6e..aa607cc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.CombineHiveInputSplit;
 import org.apache.hadoop.io.Writable;
@@ -50,7 +51,7 @@ public CombineHiveRecordReader(InputSplit split, Configuration conf,
     String inputFormatClassName = hsplit.inputFormatClassName();
     Class inputFormatClass = null;
     try {
-      inputFormatClass = Class.forName(inputFormatClassName);
+      inputFormatClass = JavaUtils.loadClass(inputFormatClassName);
     } catch (ClassNotFoundException e) {
       throw new IOException("CombineHiveRecordReader: class not found "
          + inputFormatClassName);
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
index e2ae25b..d06f372 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
 import org.apache.hadoop.hive.ql.exec.Operator;
@@ -232,8 +233,8 @@ public static RecordWriter getHiveRecordWriter(JobConf jc,
     jc_output = new JobConf(jc);
     String codecStr = conf.getCompressCodec();
     if (codecStr != null && !codecStr.trim().equals("")) {
-      Class codec = (Class) Class
-          .forName(codecStr);
+      Class codec =
+          (Class) JavaUtils.loadClass(codecStr);
       FileOutputFormat.setOutputCompressorClass(jc_output, codec);
     }
     String type = conf.getCompressType();
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 159d3ab..5a4f1e1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.io.IOConstants;
@@ -286,7 +287,7 @@ static CompressionCodec createCodec(CompressionKind kind) {
       try {
         Class lzo =
             (Class)
-            Class.forName("org.apache.hadoop.hive.ql.io.orc.LzoCodec");
+            JavaUtils.loadClass("org.apache.hadoop.hive.ql.io.orc.LzoCodec");
         return lzo.newInstance();
       } catch (ClassNotFoundException e) {
         throw new IllegalArgumentException("LZO is not available.", e);
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
index 0f7e833..f4ffdd6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanTask.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -144,8 +145,7 @@ public int execute(DriverContext driverContext) {
     LOG.info("Using " + inpFormat);
 
     try {
-      job.setInputFormat((Class) (Class
-          .forName(inpFormat)));
+      job.setInputFormat((Class) JavaUtils.loadClass(inpFormat));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e.getMessage(), e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
index 73c6dcc..4633820 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateTask.java
@@ -24,6 +24,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
@@ -120,8 +121,7 @@ public int execute(DriverContext driverContext) {
     LOG.info("Using " + inpFormat);
 
     try {
-      job.setInputFormat((Class) (Class
-          .forName(inpFormat)));
+      job.setInputFormat((Class) JavaUtils.loadClass(inpFormat));
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e.getMessage(), e);
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
index ff34682..9509f8e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/AbstractSMBJoinProc.java
@@ -26,6 +26,7 @@
 import java.util.Set;
 import java.util.Stack;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.ql.ErrorMsg;
@@ -440,10 +441,10 @@ protected boolean canConvertJoinToBucketMapJoin(
     Class bigTableMatcherClass = null;
     try {
+      String selector = HiveConf.getVar(pGraphContext.getConf(),
+          HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN_BIGTABLE_SELECTOR);
       bigTableMatcherClass =
-          (Class)
-          (Class.forName(HiveConf.getVar(pGraphContext.getConf(),
-              HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN_BIGTABLE_SELECTOR)));
+          (Class) JavaUtils.loadClass(selector);
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e.getMessage());
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
index 6bef5f5..4a1bd15 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConvertJoinMapJoin.java
@@ -28,6 +28,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.AppMasterEventOperator;
 import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
@@ -181,10 +182,10 @@ private Object checkAndConvertSMBJoin(OptimizeTezProcContext context, JoinOperator joinOp,
     }
     Class bigTableMatcherClass = null;
     try {
+      String selector = HiveConf.getVar(context.parseContext.getConf(),
+          HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN_BIGTABLE_SELECTOR);
      bigTableMatcherClass =
-          (Class) (Class.forName(HiveConf.getVar(
-              context.parseContext.getConf(),
-              HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN_BIGTABLE_SELECTOR)));
+          (Class) JavaUtils.loadClass(selector);
     } catch (ClassNotFoundException e) {
       throw new SemanticException(e.getMessage());
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 2b23559..84e1906 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -45,6 +45,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -1036,7 +1037,7 @@ private void analyzeCreateIndex(ASTNode ast) throws SemanticException {
       typeName = indexType.getHandlerClsName();
     } else {
       try {
-        Class.forName(typeName);
+        JavaUtils.loadClass(typeName);
       } catch (Exception e) {
         throw new SemanticException("class name provided for index handler not found.", e);
       }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
index f75bec5..830a8eb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java
@@ -27,7 +27,7 @@
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.PTFPartition;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -288,7 +288,7 @@ private TableFunctionResolver constructResolver(String className) throws HiveException {
     try {
       @SuppressWarnings("unchecked")
       Class rCls = (Class)
-          Class.forName(className);
+          JavaUtils.loadClass(className);
       return ReflectionUtils.newInstance(rCls, null);
     } catch (Exception e) {
       throw new HiveException(e);
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index b62ffed..b9056f7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -31,6 +31,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
@@ -143,7 +144,7 @@ public static TableDesc getDefaultTableDesc(CreateTableDesc localDirectoryDesc,
           serdeConstants.SERIALIZATION_LIB, localDirectoryDesc.getSerName());
     }
     if (localDirectoryDesc.getOutputFormat() != null){
-      ret.setOutputFileFormatClass(Class.forName(localDirectoryDesc.getOutputFormat()));
+      ret.setOutputFileFormatClass(JavaUtils.loadClass(localDirectoryDesc.getOutputFormat()));
     }
     if (localDirectoryDesc.getNullFormat() != null) {
       properties.setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
@@ -306,7 +307,7 @@ public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols,
     try {
       if (crtTblDesc.getSerName() != null) {
-        Class c = Class.forName(crtTblDesc.getSerName());
+        Class c = JavaUtils.loadClass(crtTblDesc.getSerName());
         serdeClass = c;
       }
@@ -355,8 +356,8 @@ public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols,
       // replace the default input & output file format with those found in
       // crtTblDesc
-      Class c1 = Class.forName(crtTblDesc.getInputFormat());
-      Class c2 = Class.forName(crtTblDesc.getOutputFormat());
+      Class c1 = JavaUtils.loadClass(crtTblDesc.getInputFormat());
+      Class c2 = JavaUtils.loadClass(crtTblDesc.getOutputFormat());
       Class in_class = c1;
       Class out_class = c2;
diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
index e26031c..c17ce23 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsAggregator.java
@@ -32,6 +32,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -69,7 +70,7 @@ public boolean connect(Configuration hiveconf, Task sourceTask) {
     this.sourceTask = sourceTask;
     try {
-      Class.forName(driver).newInstance();
+      JavaUtils.loadClass(driver).newInstance();
     } catch (Exception e) {
       LOG.error("Error during instantiating JDBC driver " + driver + ". ", e);
       return false;
diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
index 32826e7..afeed9c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsPublisher.java
@@ -35,6 +35,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
@@ -69,7 +70,7 @@ public boolean connect(Configuration hiveconf) {
     String driver = HiveConf.getVar(hiveconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
     try {
-      Class.forName(driver).newInstance();
+      JavaUtils.loadClass(driver).newInstance();
     } catch (Exception e) {
      LOG.error("Error during instantiating JDBC driver " + driver + ". ", e);
", e); return false; @@ -272,7 +273,7 @@ public boolean init(Configuration hconf) { this.hiveconf = hconf; connectionString = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSDBCONNECTIONSTRING); String driver = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER); - Class.forName(driver).newInstance(); + JavaUtils.loadClass(driver).newInstance(); synchronized(DriverManager.class) { DriverManager.setLoginTimeout(timeout); conn = DriverManager.getConnection(connectionString); diff --git ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java index d3ad515..b8e18ea 100644 --- ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java +++ ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java @@ -24,6 +24,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.common.ValidTxnList; import org.apache.hadoop.hive.common.ValidReadTxnList; import org.apache.hadoop.hive.conf.HiveConf; @@ -647,7 +648,7 @@ public String toString() { private static T instantiate(Class classType, String classname) throws IOException { T t = null; try { - Class c = Class.forName(classname); + Class c = JavaUtils.loadClass(classname); Object o = c.newInstance(); if (classType.isAssignableFrom(o.getClass())) { t = (T)o; diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java index 89496ea..17cab51 100644 --- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java +++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFReflect.java @@ -20,6 +20,7 @@ import java.lang.reflect.Method; import java.util.Arrays; +import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException; @@ -102,7 +103,7 @@ public Object evaluate(DeferredObject[] arguments) throws HiveException { className = ObjectInspectorUtils.copyToStandardObject(newClassName, inputClassNameOI); String classNameString = classNameOI.getPrimitiveJavaObject(className); try { - c = Class.forName(classNameString); + c = JavaUtils.loadClass(classNameString); } catch (ClassNotFoundException ex) { throw new HiveException("UDFReflect evaluate ", ex); }