diff --git metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index d0e94bf..3deed45 100644
--- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -310,42 +310,6 @@ public static boolean updatePartitionStatsFast(Partition part, Warehouse wh,
   /**
    * getDeserializer
    *
-   * Get the Deserializer for a table given its name and properties.
-   *
-   * @param conf
-   *          hadoop config
-   * @param schema
-   *          the properties to use to instantiate the deserializer
-   * @return
-   *          Returns instantiated deserializer by looking up class name of deserializer stored in passed
-   *          in properties. Also, initializes the deserializer with schema stored in passed in properties.
-   * @exception MetaException
-   *              if any problems instantiating the Deserializer
-   *
-   *              todo - this should move somewhere into serde.jar
-   *
-   */
-  static public Deserializer getDeserializer(Configuration conf,
-      Properties schema) throws MetaException {
-    try {
-      String clazzName = schema.getProperty(serdeConstants.SERIALIZATION_LIB);
-      if(clazzName == null) {
-        throw new IllegalStateException("Property " + serdeConstants.SERIALIZATION_LIB + " cannot be null");
-      }
-      Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(clazzName)
-          .asSubclass(Deserializer.class), conf);
-      deserializer.initialize(conf, schema);
-      return deserializer;
-    } catch (Exception e) {
-      LOG.error("error in initSerDe: " + e.getClass().getName() + " "
-          + e.getMessage(), e);
-      throw new MetaException(e.getClass().getName() + " " + e.getMessage());
-    }
-  }
-
-  /**
-   * getDeserializer
-   *
    * Get the Deserializer for a table.
    *
    * @param conf
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
index d2b2526..fc9b7e4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
@@ -400,7 +400,7 @@ private void getNextPath() throws Exception {
       this.inputSplits = inputSplits;
 
       splitNum = 0;
-      serde = partDesc.getDeserializer();
+      serde = partDesc.getDeserializer(job);
       serde.initialize(job, partDesc.getOverlayedProperties());
 
       if (currTbl != null) {
@@ -646,7 +646,7 @@ public ObjectInspector getOutputObjectInspector() throws HiveException {
       // Get the OI corresponding to all the partitions
       for (PartitionDesc listPart : listParts) {
         partition = listPart;
-        Deserializer partSerde = listPart.getDeserializer();
+        Deserializer partSerde = listPart.getDeserializer(job);
         partSerde.initialize(job, listPart.getOverlayedProperties());
 
         partitionedTableOI = ObjectInspectorConverters.getConvertedOI(
diff --git ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
index 56d9808..d6344e4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
+++ ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveInputFormat.java
@@ -307,8 +307,12 @@ public int hashCode() {
       Class inputFormatClass = part.getInputFileFormatClass();
       String inputFormatClassName = inputFormatClass.getName();
       InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
-      String deserializerClassName = part.getDeserializer() == null ? null
-          : part.getDeserializer().getClass().getName();
+      String deserializerClassName = null;
+      try {
+        deserializerClassName = part.getDeserializer(job).getClass().getName();
+      } catch (Exception e) {
+        // ignore
+      }
 
       // Since there is no easy way of knowing whether MAPREDUCE-1597 is present in the tree or not,
       // we use a configuration variable for the same
diff --git ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
index f4476a9..0fe260d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
+++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
@@ -260,19 +260,6 @@ final public Deserializer getDeserializer() {
     return deserializer;
   }
 
-  final public Deserializer getDeserializer(Properties props) {
-    if (deserializer == null) {
-      try {
-        deserializer = MetaStoreUtils.getDeserializer(Hive.get().getConf(), props);
-      } catch (HiveException e) {
-        throw new RuntimeException(e);
-      } catch (MetaException e) {
-        throw new RuntimeException(e);
-      }
-    }
-    return deserializer;
-  }
-
   public Properties getSchema() {
     return MetaStoreUtils.getSchema(tPartition, table.getTTable());
   }
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
index 8a7c3c4..fc65bb6 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
@@ -23,18 +23,18 @@
 import java.util.LinkedHashMap;
 import java.util.Properties;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
 
 /**
  * PartitionDesc.
@@ -105,14 +105,19 @@ public void setPartSpec(final LinkedHashMap<String, String> partSpec) {
   }
 
   /**
-   * Return a deserializer object corresponding to the tableDesc.
+   * Return a deserializer object corresponding to the partitionDesc.
    */
-  public Deserializer getDeserializer() {
-    try {
-      return MetaStoreUtils.getDeserializer(Hive.get().getConf(), getProperties());
-    } catch (Exception e) {
-      return null;
+  public Deserializer getDeserializer(Configuration conf) throws Exception {
+    Properties schema = getProperties();
+    String clazzName = schema.getProperty(serdeConstants.SERIALIZATION_LIB);
+    if (clazzName == null) {
+      throw new IllegalStateException("Property " + serdeConstants.SERIALIZATION_LIB
+          + " cannot be null");
     }
+    Deserializer deserializer = ReflectionUtils.newInstance(conf.getClassByName(clazzName)
+        .asSubclass(Deserializer.class), conf);
+    deserializer.initialize(conf, schema);
+    return deserializer;
   }
 
   public void setInputFileFormatClass(