Index: src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (revision 1298977) +++ src/java/org/apache/hcatalog/mapreduce/FileOutputCommitterContainer.java (revision ) @@ -125,6 +125,9 @@ @Override public void setupJob(JobContext context) throws IOException { if(getBaseOutputCommitter() != null && !dynamicPartitioningUsed) { + // TODO: Hack! Pig messes up mapred.output.dir, when 2 Storers are used in the same Pig script. + // Workaround: Set mapred.output.dir from OutputJobInfo. + context.getConfiguration().set("mapred.output.dir", jobInfo.getLocation()); getBaseOutputCommitter().setupJob(HCatMapRedUtil.createJobContext(context)); } // in dynamic usecase, called through FileRecordWriterContainer @@ -140,13 +143,16 @@ @Override public void abortJob(JobContext jobContext, State state) throws IOException { org.apache.hadoop.mapred.JobContext - marpedJobContext = HCatMapRedUtil.createJobContext(jobContext); + mapRedJobContext = HCatMapRedUtil.createJobContext(jobContext); if (dynamicPartitioningUsed){ discoverPartitions(jobContext); } if(getBaseOutputCommitter() != null && !dynamicPartitioningUsed) { - getBaseOutputCommitter().abortJob(marpedJobContext, state); + // TODO: Hack! Pig messes up mapred.output.dir, when 2 Storers are used in the same Pig script. + // Workaround: Set mapred.output.dir from OutputJobInfo. + mapRedJobContext.getConfiguration().set("mapred.output.dir", jobInfo.getLocation()); + getBaseOutputCommitter().abortJob(mapRedJobContext, state); } else if (dynamicPartitioningUsed){ for(JobContext currContext : contextDiscoveredByPath.values()){ @@ -219,6 +225,9 @@ discoverPartitions(jobContext); } if(getBaseOutputCommitter() != null && !dynamicPartitioningUsed) { + // TODO: Hack! Pig messes up mapred.output.dir, when 2 Storers are used in the same Pig script. + // Workaround: Set mapred.output.dir from OutputJobInfo. + jobContext.getConfiguration().set("mapred.output.dir", jobInfo.getLocation()); getBaseOutputCommitter().commitJob(HCatMapRedUtil.createJobContext(jobContext)); } // create _SUCCESS FILE if so requested. @@ -256,7 +265,10 @@ if( table.getPartitionKeys().size() == 0 ) { //non partitioned table if(getBaseOutputCommitter() != null && !dynamicPartitioningUsed) { + // TODO: Hack! Pig messes up mapred.output.dir, when 2 Storers are used in the same Pig script. + // Workaround: Set mapred.output.dir from OutputJobInfo. + context.getConfiguration().set("mapred.output.dir", jobInfo.getLocation()); - getBaseOutputCommitter().cleanupJob(HCatMapRedUtil.createJobContext(context)); + getBaseOutputCommitter().cleanupJob(HCatMapRedUtil.createJobContext(context)); } else if (dynamicPartitioningUsed){ for(JobContext currContext : contextDiscoveredByPath.values()){ Index: src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java IDEA additional info: Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP <+>UTF-8 =================================================================== --- src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java (revision 1298977) +++ src/java/org/apache/hcatalog/mapreduce/HCatOutputFormat.java (revision ) @@ -231,7 +231,6 @@ public RecordWriter<WritableComparable<?>, HCatRecord> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException { - getOutputFormat(context).getOutputCommitter(context).setupJob(context); return getOutputFormat(context).getRecordWriter(context); }