diff --git src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
index f2b3890..5acfd62 100644
--- src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
+++ src/java/org/apache/hcatalog/mapreduce/InputJobInfo.java
@@ -19,9 +19,16 @@
 package org.apache.hcatalog.mapreduce;
 
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.util.List;
 import java.util.Properties;
+import java.util.zip.Deflater;
+import java.util.zip.DeflaterOutputStream;
+import java.util.zip.Inflater;
+import java.util.zip.InflaterInputStream;
 
 /** The class used to serialize and store the information read from the metadata server */
 public class InputJobInfo implements Serializable {
@@ -40,7 +47,7 @@ public class InputJobInfo implements Serializable {
   private String filter;
 
   /** The list of partitions matching the filter. */
-  private List partitions;
+  private transient List partitions;
 
   /** implementation specific job properties */
   private Properties properties;
@@ -134,4 +141,48 @@ public class InputJobInfo implements Serializable {
   public Properties getProperties() {
     return properties;
   }
+
+  /**
+   * Custom serialization: the partition list can be very large, so it is
+   * written through a Deflater-compressed stream rather than by default
+   * field serialization (partitions is transient).
+   */
+  private void writeObject(ObjectOutputStream oos)
+      throws IOException {
+    oos.defaultWriteObject();
+    Deflater def = new Deflater(Deflater.BEST_COMPRESSION);
+    DeflaterOutputStream compressedStream = new DeflaterOutputStream(oos, def);
+    ObjectOutputStream partInfoWriter = new ObjectOutputStream(compressedStream);
+    try {
+      partInfoWriter.writeObject(partitions);
+      partInfoWriter.flush();
+      // finish() completes the compressed block without closing the
+      // caller-owned underlying stream (close() would also close oos).
+      compressedStream.finish();
+    } finally {
+      // DeflaterOutputStream does not end() an externally supplied
+      // Deflater; release its native memory explicitly.
+      def.end();
+    }
+  }
+
+  /**
+   * Custom deserialization: inflate the compressed partition list
+   * written by writeObject.
+   */
+  @SuppressWarnings("unchecked")
+  private void readObject(ObjectInputStream ois)
+      throws IOException, ClassNotFoundException {
+    ois.defaultReadObject();
+    Inflater inf = new Inflater();
+    ObjectInputStream partInfoReader =
+        new ObjectInputStream(new InflaterInputStream(ois, inf));
+    try {
+      partitions = (List) partInfoReader.readObject();
+    } finally {
+      // Release the Inflater's native memory; partInfoReader is not
+      // closed because that would close the caller-owned ois.
+      inf.end();
+    }
+  }
 }