Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (revision 1127872)
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (working copy)
@@ -919,7 +919,7 @@
       assertTrue(failure.failOnCommit && failure.failOnDeleteFile);
       w.rollback();
       assertFalse(dir.fileExists("1.fnx"));
-      // FIXME: on windows, this often fails! assertEquals(0, dir.listAll().length);
+      assertEquals(0, dir.listAll().length);
       dir.close();
     }
   }
Index: lucene/src/java/org/apache/lucene/index/DocInverterPerField.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/DocInverterPerField.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/DocInverterPerField.java (working copy)
@@ -53,8 +53,11 @@
 
   @Override
   void abort() {
-    consumer.abort();
-    endConsumer.abort();
+    try {
+      consumer.abort();
+    } finally {
+      endConsumer.abort();
+    }
   }
 
   @Override
Index: lucene/src/java/org/apache/lucene/index/SegmentInfos.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SegmentInfos.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/SegmentInfos.java (working copy)
@@ -40,6 +40,7 @@
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.NoSuchDirectoryException;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.ThreadInterruptedException;
 
 /**
@@ -323,18 +324,14 @@
       SegmentInfosWriter infosWriter = codecs.getSegmentInfosWriter();
       segnOutput = infosWriter.writeInfos(directory, segmentFileName, this);
       infosWriter.prepareCommit(segnOutput);
-      success = true;
       pendingSegnOutput = segnOutput;
+      success = true;
     } finally {
       if (!success) {
         // We hit an exception above; try to close the file
         // but suppress any exception:
+        IOUtils.closeSafelyNoException(segnOutput);
         try {
-          segnOutput.close();
-        } catch (Throwable t) {
-          // Suppress so we keep throwing the original exception
-        }
-        try {
           // Try not to leave a truncated segments_N file in
           // the index:
           directory.deleteFile(segmentFileName);
Index: lucene/src/java/org/apache/lucene/index/TermsHash.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/TermsHash.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/TermsHash.java (working copy)
@@ -134,11 +134,23 @@
 
   @Override
   void finishDocument() throws IOException {
+    boolean success = false;
     try {
       consumer.finishDocument(this);
+      success = true;
     } finally {
-      if (nextTermsHash != null) {
-        nextTermsHash.consumer.finishDocument(nextTermsHash);
+      if (!success) {
+        abort();
+      } else if (nextTermsHash != null) {
+        success = false;
+        try {
+          nextTermsHash.consumer.finishDocument(nextTermsHash);
+          success = true;
+        } finally {
+          if (!success) {
+            abort();
+          }
+        }
       }
     }
   }
Index: lucene/src/java/org/apache/lucene/index/SegmentMerger.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SegmentMerger.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/SegmentMerger.java (working copy)
@@ -546,14 +546,13 @@
     }
     codec = segmentWriteState.segmentCodecs.codec();
     final FieldsConsumer consumer = codec.fieldsConsumer(segmentWriteState);
-
-    // NOTE: this is silly, yet, necessary -- we create a
-    // MultiBits as our skip docs only to have it broken
-    // apart when we step through the docs enums in
-    // MultiDocsEnum.
-    mergeState.multiDeletedDocs = new MultiBits(bits, bitsStarts);
-
     try {
+      // NOTE: this is silly, yet, necessary -- we create a
+      // MultiBits as our skip docs only to have it broken
+      // apart when we step through the docs enums in
+      // MultiDocsEnum.
+      mergeState.multiDeletedDocs = new MultiBits(bits, bitsStarts);
+
       consumer.merge(mergeState,
                      new MultiFields(fields.toArray(Fields.EMPTY_ARRAY),
                                      slices.toArray(ReaderUtil.Slice.EMPTY_ARRAY)));
Index: lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/DocFieldProcessor.java (working copy)
@@ -84,20 +84,45 @@
   @Override
   public void abort() {
-    for(int i=0;i it = consumers.iterator(); IOException err = null; while (it.hasNext()) {
Index: lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java (working copy)
@@ -35,11 +35,19 @@
     throws IOException {
     // Open files for TermVector storage
     tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
-    tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
-    tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
-    tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
-    tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
-    tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
+    boolean success = false;
+    try {
+      tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+      tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+      tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
+      success = true;
+    } finally {
+      if (!success) {
+        IOUtils.closeSafelyNoException(tvx, tvd, tvf);
+      }
+    }
 
     this.fieldInfos = fieldInfos;
   }
Index: lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java (working copy)
@@ -38,6 +38,7 @@
 import org.apache.lucene.index.codecs.TermsIndexWriterBase;
 import org.apache.lucene.index.codecs.standard.StandardCodec;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /** This codec "inlines" the postings for terms that have
  *  low docFreq.  It wraps another codec, which is used for
@@ -81,7 +82,7 @@
       success = true;
     } finally {
       if (!success) {
-        pulsingWriter.close();
+        IOUtils.closeSafelyNoException(pulsingWriter);
       }
     }
 
@@ -93,11 +94,7 @@
       return ret;
     } finally {
       if (!success) {
-        try {
-          pulsingWriter.close();
-        } finally {
-          indexWriter.close();
-        }
+        IOUtils.closeSafelyNoException(pulsingWriter, indexWriter);
       }
     }
   }
Index: lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java (revision 1127872)
+++ lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingPostingsWriterImpl.java (working copy)
@@ -71,8 +71,6 @@
    *  for this term) is <= maxPositions, then the postings are
    *  inlined into terms dict */
   public PulsingPostingsWriterImpl(int maxPositions, PostingsWriterBase wrappedPostingsWriter) throws IOException {
-    super();
-
     pending = new Position[maxPositions];
     for(int i=0;i