Index: lucene/src/java/org/apache/lucene/index/values/Bytes.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/Bytes.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/Bytes.java	(working copy)
@@ -31,7 +31,6 @@
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.AttributeSource;
-import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CodecUtil;
 import org.apache.lucene.util.IOUtils;
@@ -116,7 +115,7 @@
 
     if (fixedSize) {
       if (mode == Mode.STRAIGHT) {
-        return new FixedStraightBytesImpl.Writer(dir, id);
+        return new FixedStraightBytesImpl.Writer(dir, id, bytesUsed);
       } else if (mode == Mode.DEREF) {
         return new FixedDerefBytesImpl.Writer(dir, id, bytesUsed);
       } else if (mode == Mode.SORTED) {
@@ -337,37 +336,56 @@
   // TODO: open up this API?!
   static abstract class BytesWriterBase extends Writer {
     private final String id;
-    protected IndexOutput idxOut;
-    protected IndexOutput datOut;
+    private IndexOutput idxOut;
+    private IndexOutput datOut;
     protected BytesRef bytesRef;
-    protected final ByteBlockPool pool;
+    private final Directory dir;
+    private final String codecName;
+    private final int version;
 
     protected BytesWriterBase(Directory dir, String id, String codecName,
-        int version, boolean initIndex, ByteBlockPool pool,
+        int version,
         AtomicLong bytesUsed) throws IOException {
       super(bytesUsed);
       this.id = id;
-      this.pool = pool;
-      datOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
-            DATA_EXTENSION));
+      this.dir = dir;
+      this.codecName = codecName;
+      this.version = version;
+    }
+    
+    protected IndexOutput getDataOut() throws IOException {
+      if (datOut == null) {
+        boolean success = false;
+        try {
+          datOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
+              DATA_EXTENSION));
+          CodecUtil.writeHeader(datOut, codecName, version);
+          success = true;
+        } finally {
+          if (!success) {
+            IOUtils.closeSafely(true, datOut);
+          }
+        }
+      }
+      return datOut;
+    }
+
+    protected IndexOutput getIndexOut() throws IOException {
       boolean success = false;
       try {
-        CodecUtil.writeHeader(datOut, codecName, version);
-        if (initIndex) {
+        if (idxOut == null) {
           idxOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
               INDEX_EXTENSION));
           CodecUtil.writeHeader(idxOut, codecName, version);
-        } else {
-          idxOut = null;
         }
         success = true;
       } finally {
         if (!success) {
-          IOUtils.closeSafely(true, datOut, idxOut);
+          IOUtils.closeSafely(true, idxOut);
         }
       }
+      return idxOut;
     }
-
     /**
      * Must be called only with increasing docIDs. It's OK for some docIDs to be
      * skipped; they will be filled with 0 bytes.
@@ -376,15 +394,7 @@
     public abstract void add(int docID, BytesRef bytes) throws IOException;
 
     @Override
-    public void finish(int docCount) throws IOException {
-      try {
-        IOUtils.closeSafely(false, datOut, idxOut);
-      } finally {
-        if (pool != null) {
-          pool.reset();
-        }
-      }
-    }
+    public abstract void finish(int docCount) throws IOException;
 
     @Override
     protected void mergeDoc(int docID) throws IOException {
Index: lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java	(working copy)
@@ -25,11 +25,13 @@
 import org.apache.lucene.index.values.Bytes.BytesWriterBase;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.ByteBlockPool.Allocator;
@@ -51,9 +53,7 @@
   static class Writer extends BytesWriterBase {
     private int size = -1;
     private int[] docToID;
-    private final BytesRefHash hash = new BytesRefHash(pool,
-        BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
-            BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
+    private final BytesRefHash hash;
     public Writer(Directory dir, String id, AtomicLong bytesUsed)
         throws IOException {
       this(dir, id, new DirectTrackingAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, bytesUsed),
@@ -62,11 +62,12 @@
 
     public Writer(Directory dir, String id, Allocator allocator,
         AtomicLong bytesUsed) throws IOException {
-      super(dir, id, CODEC_NAME, VERSION_CURRENT, true,
-          new ByteBlockPool(allocator), bytesUsed);
+      super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+      hash = new BytesRefHash(new ByteBlockPool(allocator),
+          BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
+              BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
       docToID = new int[1];
-      bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT); // TODO BytesRefHash
-                                                            // uses bytes too!
+      bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
     }
 
     @Override
@@ -75,20 +76,14 @@
         return;
       if (size == -1) {
         size = bytes.length;
-        datOut.writeInt(size);
       } else if (bytes.length != size) {
         throw new IllegalArgumentException("expected bytes size=" + size
             + " but got " + bytes.length);
       }
       int ord = hash.add(bytes);
-
-      if (ord >= 0) {
-        // new added entry
-        datOut.writeBytes(bytes.bytes, bytes.offset, bytes.length);
-      } else {
+      if (ord < 0) {
         ord = (-ord) - 1;
       }
-
       if (docID >= docToID.length) {
         final int size = docToID.length;
         docToID = ArrayUtil.grow(docToID, 1 + docID);
@@ -102,11 +97,25 @@
     // some last docs that we didn't see
     @Override
     public void finish(int docCount) throws IOException {
+      boolean success = false;
+      final int numValues = hash.size();
+      final IndexOutput datOut = getDataOut();
       try {
-        if (size == -1) {
-          datOut.writeInt(size);
+        datOut.writeInt(size);
+        BytesRef bytesRef = new BytesRef(size < 0 ? 0 : size);
+        for (int i = 0; i < numValues; i++) {
+          hash.get(i, bytesRef);
+          datOut.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
         }
-        final int count = 1 + hash.size();
+        success = true;
+      } finally {
+        IOUtils.closeSafely(!success, datOut);
+        hash.close();
+      }
+      success = false;
+      final IndexOutput idxOut = getIndexOut();
+      try {
+        final int count = 1 + numValues;
         idxOut.writeInt(count - 1);
         // write index
         final PackedInts.Writer w = PackedInts.getWriter(idxOut, docCount,
@@ -120,9 +129,9 @@
           w.add(0);
         }
         w.finish();
+        success = true;
       } finally {
-        hash.close();
-        super.finish(docCount);
+        IOUtils.closeSafely(!success, idxOut);
         bytesUsed
             .addAndGet((-docToID.length) * RamUsageEstimator.NUM_BYTES_INT);
         docToID = null;
Index: lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java	(working copy)
@@ -27,12 +27,14 @@
 import org.apache.lucene.index.values.FixedDerefBytesImpl.Reader.DerefBytesEnum;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.CodecUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.ByteBlockPool.Allocator;
@@ -56,11 +58,8 @@
     private int size = -1;
     private int[] docToEntry;
     private final Comparator<BytesRef> comp;
+    private final BytesRefHash hash;
 
-    private final BytesRefHash hash = new BytesRefHash(pool,
-        BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
-            BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
-
     public Writer(Directory dir, String id, Comparator<BytesRef> comp,
         AtomicLong bytesUsed) throws IOException {
       this(dir, id, comp, new DirectTrackingAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, bytesUsed),
@@ -69,10 +68,12 @@
 
     public Writer(Directory dir, String id, Comparator<BytesRef> comp,
         Allocator allocator, AtomicLong bytesUsed) throws IOException {
-      super(dir, id, CODEC_NAME, VERSION_CURRENT, true,
-          new ByteBlockPool(allocator), bytesUsed);
+      super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+      ByteBlockPool pool = new ByteBlockPool(allocator);
+      hash = new BytesRefHash(pool, BytesRefHash.DEFAULT_CAPACITY,
+          new TrackingDirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY,
+              bytesUsed));
       docToEntry = new int[1];
-      // docToEntry[0] = -1;
       bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
       this.comp = comp;
     }
@@ -83,7 +84,6 @@
         return; // default - skip it
       if (size == -1) {
         size = bytes.length;
-        datOut.writeInt(size);
       } else if (bytes.length != size) {
         throw new IllegalArgumentException("expected bytes size=" + size
             + " but got " + bytes.length);
@@ -104,26 +104,34 @@
     // some last docs that we didn't see
     @Override
     public void finish(int docCount) throws IOException {
+      final IndexOutput datOut = getDataOut();
+      boolean success = false;
+      final int count = hash.size();
+      final int[] address = new int[count];
+
       try {
-        if (size == -1) {// no data added
-          datOut.writeInt(size);
-        }
+        datOut.writeInt(size);
         final int[] sortedEntries = hash.sort(comp);
-        final int count = hash.size();
-        int[] address = new int[count];
         // first dump bytes data, recording address as we go
+        final BytesRef bytesRef = new BytesRef(size < 0 ? 0 : size);
         for (int i = 0; i < count; i++) {
           final int e = sortedEntries[i];
-          final BytesRef bytes = hash.get(e, new BytesRef());
+          final BytesRef bytes = hash.get(e, bytesRef);
           assert bytes.length == size;
           datOut.writeBytes(bytes.bytes, bytes.offset, bytes.length);
           address[e] = 1 + i;
         }
-
+        success = true;
+      } finally {
+        IOUtils.closeSafely(!success, datOut);
+        hash.close();
+      }
+      final IndexOutput idxOut = getIndexOut();
+      success = false;
+      try {
         idxOut.writeInt(count);
-
         // next write index
-        PackedInts.Writer w = PackedInts.getWriter(idxOut, docCount,
+        final PackedInts.Writer w = PackedInts.getWriter(idxOut, docCount,
             PackedInts.bitsRequired(count));
         final int limit;
         if (docCount > docToEntry.length) {
@@ -148,11 +156,10 @@
         }
         w.finish();
       } finally {
-        super.finish(docCount);
+        IOUtils.closeSafely(!success, idxOut);
         bytesUsed.addAndGet((-docToEntry.length)
             * RamUsageEstimator.NUM_BYTES_INT);
         docToEntry = null;
-        hash.close();
       }
     }
   }
Index: lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java	(working copy)
@@ -17,14 +17,20 @@
  * limitations under the License.
  */
 
+import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;
+
 import java.io.IOException;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.lucene.index.values.Bytes.BytesBaseSource;
 import org.apache.lucene.index.values.Bytes.BytesReaderBase;
 import org.apache.lucene.index.values.Bytes.BytesWriterBase;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.AttributeSource;
+import org.apache.lucene.util.ByteBlockPool;
+import org.apache.lucene.util.ByteBlockPool.DirectTrackingAllocator;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
@@ -44,30 +50,77 @@
     private int size = -1;
     // start at -1 if the first added value is > 0
     private int lastDocID = -1;
-    private byte[] oneRecord;
+    private final ByteBlockPool pool;
+    private boolean merge;
+    private final int byteBlockSize;
+    private IndexOutput datOut;
 
-    public Writer(Directory dir, String id) throws IOException {
-      super(dir, id, CODEC_NAME, VERSION_CURRENT, false, null, null);
+    public Writer(Directory dir, String id, AtomicLong bytesUsed) throws IOException {
+      super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+      pool = new ByteBlockPool(new DirectTrackingAllocator(bytesUsed));
+      byteBlockSize = BYTE_BLOCK_SIZE;
     }
 
-
     @Override
     public void add(int docID, BytesRef bytes) throws IOException {
+      assert lastDocID < docID;
+      assert !merge;
       if (size == -1) {
+        if (bytes.length > BYTE_BLOCK_SIZE)
+          throw new IllegalArgumentException("bytes arrays > " + BYTE_BLOCK_SIZE + " are not supported");
         size = bytes.length;
-        datOut.writeInt(size);
-        oneRecord = new byte[size];
+        pool.nextBuffer();
       } else if (bytes.length != size) {
         throw new IllegalArgumentException("expected bytes size=" + size
             + " but got " + bytes.length);
       }
-      fill(docID);
-      assert bytes.bytes.length >= bytes.length;
-      datOut.writeBytes(bytes.bytes, bytes.offset, bytes.length);
+      if (lastDocID+1 < docID) {
+        advancePool(docID);
+      }
+      writeToPool(bytes);
+      lastDocID = docID;
     }
+    
+    private final void writeToPool(BytesRef bytes) {
+      assert !merge;
+      int overflow = (size + pool.byteUpto) - byteBlockSize;
+      do {
+        if (overflow <= 0) { 
+          System.arraycopy(bytes.bytes, bytes.offset, pool.buffer, pool.byteUpto, bytes.length);
+          pool.byteUpto += bytes.length;
+          break;
+        } else {
+          final int bytesToCopy = bytes.length-overflow;
+          System.arraycopy(bytes.bytes, bytes.offset, pool.buffer, pool.byteUpto, bytesToCopy);
+          bytes.offset += bytesToCopy;
+          bytes.length -= bytesToCopy;
+          pool.nextBuffer();
+          overflow = (overflow) - byteBlockSize;
+        }
+      }  while(true);
+    }
+    
+    private final void advancePool(int docID) {
+      assert !merge;
+      long numBytes = (docID - (lastDocID+1)) * (long) size;
+      while(numBytes > 0) {
+        if (numBytes + pool.byteUpto < byteBlockSize) {
+          pool.byteUpto += numBytes;
+          numBytes = 0;
+        } else {
+          numBytes -= byteBlockSize - pool.byteUpto;
+          pool.nextBuffer();
+        }
+      }
+      assert numBytes == 0;
+    }
 
     @Override
     protected void merge(MergeState state) throws IOException {
+      merge = true;
+      datOut = getDataOut();
+      boolean success = false;
+      try {
       if (state.bits == null && state.reader instanceof Reader) {
         Reader reader = (Reader) state.reader;
         final int maxDocs = reader.maxDoc;
@@ -77,48 +130,98 @@
         if (size == -1) {
           size = reader.size;
           datOut.writeInt(size);
-          oneRecord = new byte[size];
         }
-        fill(state.docBase);
+        if (lastDocID+1 < state.docBase) {
+          fill(datOut, state.docBase);
+          lastDocID = state.docBase-1;
+        }
         // TODO should we add a transfer to API to each reader?
         final IndexInput cloneData = reader.cloneData();
         try {
           datOut.copyBytes(cloneData, size * maxDocs);
         } finally {
-          cloneData.close();  
+          IOUtils.closeSafely(true, cloneData);  
         }
         
-        lastDocID += maxDocs - 1;
+        lastDocID += maxDocs;
       } else {
         super.merge(state);
       }
+      success = true;
+      } finally {
+        if (!success) {
+          IOUtils.closeSafely(!success, datOut);
+        }
+      }
     }
+    
+    
 
+    @Override
+    protected void mergeDoc(int docID) throws IOException {
+      assert lastDocID < docID;
+      if (size == -1) {
+        size = bytesRef.length;
+        datOut.writeInt(size);
+      }
+      assert size == bytesRef.length;
+      if (lastDocID+1 < docID) {
+        fill(datOut, docID);
+      }
+      datOut.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
+      lastDocID = docID;
+    }
+
+
+
     // Fills up to but not including this docID
-    private void fill(int docID) throws IOException {
+    private void fill(IndexOutput datOut, int docID) throws IOException {
       assert size >= 0;
-      for (int i = lastDocID + 1; i < docID; i++) {
-        datOut.writeBytes(oneRecord, size);
+      final long numBytes = (docID - (lastDocID+1)) * (long) size;
+      final byte zero = 0;
+      for (long i = 0; i < numBytes; i++) {
+        datOut.writeByte(zero);
       }
-      lastDocID = docID;
     }
 
     @Override
     public void finish(int docCount) throws IOException {
+      boolean success = false;
       try {
-        if (size == -1) {// no data added
-          datOut.writeInt(0);
+        if (!merge) {
+          // indexing path - no disk IO until here
+          assert datOut == null;
+          datOut = getDataOut();
+          if (size == -1) {
+            datOut.writeInt(0);
+          } else {
+            datOut.writeInt(size);
+            int bytesOffset = pool.byteOffset;
+            int block = 0;
+            while (bytesOffset > 0) {
+              datOut.writeBytes(pool.buffers[block++], byteBlockSize);
+              bytesOffset -= byteBlockSize;
+            }
+            datOut.writeBytes(pool.buffers[block], pool.byteUpto);
+          }
+          if (lastDocID + 1 < docCount) {
+            fill(datOut, docCount);
+          }
         } else {
-          fill(docCount);
+          // merge path - datOut should be initialized
+          assert datOut != null;
+          if (size == -1) {// no data added
+            datOut.writeInt(0);
+          } else {
+            fill(datOut, docCount);
+          }
         }
+        success = true;
       } finally {
-        super.finish(docCount);
+        pool.dropBuffersAndReset();
+        IOUtils.closeSafely(!success, datOut);
       }
     }
-
-    public long ramBytesUsed() {
-      return oneRecord == null ? 0 : oneRecord.length;
-    }
   }
   
   public static class Reader extends BytesReaderBase {
Index: lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java	(working copy)
@@ -27,12 +27,14 @@
 import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.CodecUtil;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.ByteBlockPool.Allocator;
@@ -113,7 +115,7 @@
 
     private final AddressByteStartArray array = new AddressByteStartArray(1,
         bytesUsed);
-    private final BytesRefHash hash = new BytesRefHash(pool, 16, array);
+    private final BytesRefHash hash;
 
     public Writer(Directory dir, String id, AtomicLong bytesUsed)
         throws IOException {
@@ -123,8 +125,8 @@
 
     public Writer(Directory dir, String id, Allocator allocator,
         AtomicLong bytesUsed) throws IOException {
-      super(dir, id, CODEC_NAME, VERSION_CURRENT, true,
-          new ByteBlockPool(allocator), bytesUsed);
+      super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+      hash = new BytesRefHash(new ByteBlockPool(allocator), 16, array);
       docToAddress = new int[1];
       bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
     }
@@ -144,8 +146,7 @@
       final int docAddress;
       if (e >= 0) {
         docAddress = array.address[e] = address;
-        address += writePrefixLength(datOut, bytes);
-        datOut.writeBytes(bytes.bytes, bytes.offset, bytes.length);
+        address += bytes.length < 128 ? 1 : 2;
         address += bytes.length;
       } else {
         docAddress = array.address[(-e) - 1];
@@ -169,7 +170,25 @@
     // some last docs that we didn't see
     @Override
     public void finish(int docCount) throws IOException {
+      final IndexOutput datOut = getDataOut();
+      boolean success = false;
       try {
+        final int size = hash.size();
+        final BytesRef bytesRef = new BytesRef();
+        for (int i = 0; i < size; i++) {
+          hash.get(i, bytesRef);
+          writePrefixLength(datOut, bytesRef);
+          datOut.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
+        }
+        success = true;
+      } finally {
+        hash.close();
+        IOUtils.closeSafely(!success, datOut);
+      }
+      
+      final IndexOutput idxOut = getIndexOut();
+      success = false;
+      try {
         idxOut.writeInt(address - 1);
         // write index
         // TODO(simonw): -- allow forcing fixed array (not -1)
@@ -189,9 +208,9 @@
           w.add(0);
         }
         w.finish();
+        success = true;
       } finally {
-        hash.close();
-        super.finish(docCount);
+        IOUtils.closeSafely(!success, idxOut);
         bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT
             * (-docToAddress.length));
         docToAddress = null;
Index: lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java	(working copy)
@@ -27,11 +27,13 @@
 import org.apache.lucene.index.values.Bytes.BytesWriterBase;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.ByteBlockPool.Allocator;
@@ -56,9 +58,7 @@
     private int[] docToEntry;
     private final Comparator<BytesRef> comp;
 
-    private final BytesRefHash hash = new BytesRefHash(pool,
-        BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
-            BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
+    private final BytesRefHash hash; 
 
     public Writer(Directory dir, String id, Comparator<BytesRef> comp,
         AtomicLong bytesUsed) throws IOException {
@@ -68,13 +68,14 @@
 
     public Writer(Directory dir, String id, Comparator<BytesRef> comp,
         Allocator allocator, AtomicLong bytesUsed) throws IOException {
-      super(dir, id, CODEC_NAME, VERSION_CURRENT, true,
-          new ByteBlockPool(allocator), bytesUsed);
+      super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+      this.hash = new BytesRefHash(new ByteBlockPool(allocator),
+          BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
+              BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
       this.comp = comp;
       docToEntry = new int[1];
       docToEntry[0] = -1;
       bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
-
     }
 
     @Override
@@ -99,14 +100,16 @@
     @Override
     public void finish(int docCount) throws IOException {
       final int count = hash.size();
+      final IndexOutput datOut = getDataOut();
+      long offset = 0;
+      long lastOffset = 0;
+      final int[] index = new int[count];
+      final long[] offsets = new long[count];
+      boolean success = false;
       try {
         final int[] sortedEntries = hash.sort(comp);
         // first dump bytes data, recording index & offset as
         // we go
-        long offset = 0;
-        long lastOffset = 0;
-        final int[] index = new int[count];
-        final long[] offsets = new long[count];
         for (int i = 0; i < count; i++) {
           final int e = sortedEntries[i];
           offsets[i] = offset;
@@ -118,7 +121,14 @@
           lastOffset = offset;
           offset += bytes.length;
         }
-
+        success = true;
+      } finally {
+        IOUtils.closeSafely(!success, datOut);
+        hash.close();
+      }
+      final IndexOutput idxOut = getIndexOut();
+      success = false;
+      try {
         // total bytes of data
         idxOut.writeLong(offset);
 
@@ -145,11 +155,12 @@
           offsetWriter.add(offsets[i]);
         }
         offsetWriter.finish();
+        success = true;
       } finally {
-        super.finish(docCount);
         bytesUsed.addAndGet((-docToEntry.length)
             * RamUsageEstimator.NUM_BYTES_INT);
-        hash.close();
+        docToEntry = null;
+        IOUtils.closeSafely(!success, idxOut);
       }
     }
   }
Index: lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java	(revision 1140686)
+++ lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java	(working copy)
@@ -25,9 +25,11 @@
 import org.apache.lucene.index.values.Bytes.BytesWriterBase;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.PagedBytes;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.packed.PackedInts;
@@ -48,12 +50,13 @@
     // start at -1 if the first added value is > 0
     private int lastDocID = -1;
     private long[] docToAddress;
-
+    private final IndexOutput datOut;
     public Writer(Directory dir, String id, AtomicLong bytesUsed)
         throws IOException {
-      super(dir, id, CODEC_NAME, VERSION_CURRENT, true, null, bytesUsed);
+      super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
       docToAddress = new long[1];
       bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
+      datOut = getDataOut();
     }
 
     // Fills up to but not including this docID
@@ -82,6 +85,9 @@
 
     @Override
     public void finish(int docCount) throws IOException {
+      IOUtils.closeSafely(false, datOut);
+      final IndexOutput idxOut = getIndexOut();
+      boolean success = false;
       try {
         if (lastDocID == -1) {
           idxOut.writeVLong(0);
@@ -101,11 +107,12 @@
           }
           w.finish();
         }
+        success = true;
       } finally {
         bytesUsed.addAndGet(-(docToAddress.length)
             * RamUsageEstimator.NUM_BYTES_INT);
         docToAddress = null;
-        super.finish(docCount);
+        IOUtils.closeSafely(!success, idxOut);
       }
     }
 
Index: lucene/src/test/org/apache/lucene/index/values/TestDocValues.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/values/TestDocValues.java	(revision 1140686)
+++ lucene/src/test/org/apache/lucene/index/values/TestDocValues.java	(working copy)
@@ -64,7 +64,7 @@
     Writer w = Bytes.getWriter(dir, "test", mode, comp, fixedSize, trackBytes);
     int maxDoc = 220;
     final String[] values = new String[maxDoc];
-    final int fixedLength = 3 + random.nextInt(7);
+    final int fixedLength = 1 + atLeast(50);
     for (int i = 0; i < 100; i++) {
       final String s;
       if (i > 0 && random.nextInt(5) <= 2) {
Index: lucene/src/test/org/apache/lucene/index/values/TestDocValuesIndexing.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/values/TestDocValuesIndexing.java	(revision 1140686)
+++ lucene/src/test/org/apache/lucene/index/values/TestDocValuesIndexing.java	(working copy)
@@ -329,8 +329,7 @@
     final int numValues = 50 + atLeast(10);
     for (ValueType byteIndexValue : byteVariantList) {
       List<Closeable> closeables = new ArrayList<Closeable>();
-
-      int bytesSize = 1 + atLeast(10);
+      final int bytesSize = 1 + atLeast(50);
       OpenBitSet deleted = indexValues(w, numValues, byteIndexValue,
           byteVariantList, withDeletions, bytesSize);
       final IndexReader r = IndexReader.open(w, withDeletions);
