Index: ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java	(revision 773065)
+++ ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java	(working copy)
@@ -62,20 +62,21 @@
    * The reason that it derives from FileSplit is to make sure "map.input.file" in MapTask.
    */
   public static class HiveInputSplit extends FileSplit implements InputSplit {
-
     InputSplit inputSplit;
     String inputFormatClassName;
+    JobConf job;
 
     public HiveInputSplit() {
       // This is the only public constructor of FileSplit
       super((Path)null, 0, 0, (String[])null);
     }
 
-    public HiveInputSplit(InputSplit inputSplit, String inputFormatClassName) {
+    public HiveInputSplit(InputSplit inputSplit, String inputFormatClassName, JobConf job) {
       // This is the only public constructor of FileSplit
       super((Path)null, 0, 0, (String[])null);
       this.inputSplit = inputSplit;
       this.inputFormatClassName = inputFormatClassName;
+      this.job = job;
     }
 
     public InputSplit getInputSplit() {
@@ -119,6 +120,8 @@
     }
 
     public void readFields(DataInput in) throws IOException {
+      job = new JobConf();
+      job.readFields(in);
       String inputSplitClassName = in.readUTF();
       try {
         inputSplit = (InputSplit) ReflectionUtils.newInstance(Class.forName(inputSplitClassName), job);
@@ -131,23 +134,24 @@
     }
 
     public void write(DataOutput out) throws IOException {
+      job.write(out);
       out.writeUTF(inputSplit.getClass().getName());
       inputSplit.write(out);
       out.writeUTF(inputFormatClassName);
     }
   }
 
-  static JobConf job;
+  private JobConf job;
 
   public void configure(JobConf job) {
-    HiveInputFormat.job = job;
+    this.job = job;
   }
 
   /**
    * A cache of InputFormat instances.
    */
   private static Map<Class, InputFormat<WritableComparable, Writable>> inputFormats;
-  static InputFormat<WritableComparable, Writable> getInputFormatFromCache(Class inputFormatClass) throws IOException {
+  static InputFormat<WritableComparable, Writable> getInputFormatFromCache(Class inputFormatClass, JobConf job) throws IOException {
     if (inputFormats == null) {
       inputFormats = new HashMap<Class, InputFormat<WritableComparable, Writable>>();
     }
@@ -179,7 +183,7 @@
       throw new IOException("cannot find class " + inputFormatClassName);
     }
 
-    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass);
+    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
 
     return new HiveRecordReader(inputFormat.getRecordReader(inputSplit, job, reporter));
   }
@@ -208,13 +212,13 @@
       tableDesc table = getTableDescFromPath(dir);
       // create a new InputFormat instance if this is the first time to see this class
       Class inputFormatClass = table.getInputFileFormatClass();
-      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass);
+      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
 
       FileInputFormat.setInputPaths(newjob, dir);
       newjob.setInputFormat(inputFormat.getClass());
       InputSplit[] iss = inputFormat.getSplits(newjob, numSplits/dirs.length);
       for(InputSplit is: iss) {
-        result.add(new HiveInputSplit(is, inputFormatClass.getName()));
+        result.add(new HiveInputSplit(is, inputFormatClass.getName(), newjob));
       }
     }
     return result.toArray(new HiveInputSplit[result.size()]);
@@ -234,7 +238,7 @@
     for (Path dir: dirs) {
       tableDesc table = getTableDescFromPath(dir);
       // create a new InputFormat instance if this is the first time to see this class
-      InputFormat inputFormat = getInputFormatFromCache(table.getInputFileFormatClass());
+      InputFormat inputFormat = getInputFormatFromCache(table.getInputFileFormatClass(), newjob);
 
       FileInputFormat.setInputPaths(newjob, dir);
       newjob.setInputFormat(inputFormat.getClass());
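
The net effect of the patch: HiveInputSplit carries its own JobConf instead of reading a static field shared across the class, and write()/readFields() serialize that job ahead of the wrapped split, so ReflectionUtils.newInstance has a live configuration when the split is rebuilt on the map side. Below is a minimal round-trip sketch of the new serialization order, assuming Hadoop's old mapred API; the class name, the marker key, and the TextInputFormat literal are illustrative, not part of the patch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.ReflectionUtils;

// Hypothetical harness, not part of the patch: exercises the job-first
// wire format that the patched write()/readFields() pair uses.
public class HiveInputSplitRoundTrip {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf();
    job.set("example.marker", "survived");   // marker to verify after the trip
    InputSplit split = new FileSplit(new Path("/tmp/data"), 0L, 128L, new String[0]);

    // Write in the patched order: job first, then split class name,
    // split body, and finally the input format class name.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    job.write(out);
    out.writeUTF(split.getClass().getName());
    split.write(out);
    out.writeUTF("org.apache.hadoop.mapred.TextInputFormat");

    // Read back in the same order, mirroring the patched readFields().
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    JobConf restoredJob = new JobConf();
    restoredJob.readFields(in);
    String splitClassName = in.readUTF();
    InputSplit restoredSplit = (InputSplit) ReflectionUtils.newInstance(
        Class.forName(splitClassName), restoredJob); // deserialized job, no static state
    restoredSplit.readFields(in);
    String inputFormatClassName = in.readUTF();

    System.out.println(restoredJob.get("example.marker")); // prints "survived"
    System.out.println(inputFormatClassName);
  }
}

The trade-off visible in the sketch is that every split now carries a full serialized configuration, in exchange for removing the unsafe static JobConf that was only valid in a JVM where configure() had already run.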