Index: lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java
===================================================================
--- lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java	(revision 1137007)
+++ lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java	(working copy)
@@ -28,6 +28,7 @@
 import org.apache.lucene.index.IndexWriter;       // javadocs
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.MergeScheduler;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.store.RAMDirectory;      // javadocs
 import org.apache.lucene.util.IOUtils;
 
@@ -234,6 +235,15 @@
   }
 
   @Override
+  public synchronized CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    if (cache.fileExists(name)) {
+      return cache.openCompoundInput(name, bufferSize, mode);
+    } else {
+      return delegate.openCompoundInput(name, bufferSize, mode);
+    }
+  }
+
+  @Override
   public synchronized IndexInput openInput(String name, int bufferSize) throws IOException {
     if (cache.fileExists(name)) {
       return cache.openInput(name, bufferSize);
Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
===================================================================
--- lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java	(revision 1137007)
+++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java	(working copy)
@@ -78,7 +78,7 @@
     _TestUtil.rmDir(destDir2);
     destDir2.mkdirs();
     IndexSplitter.main(new String[] {dir.getAbsolutePath(), destDir2.getAbsolutePath(), splitSegName});
-    assertEquals(3, destDir2.listFiles().length);
+    assertEquals(4, destDir2.listFiles().length);
     Directory fsDirDest2 = newFSDirectory(destDir2);
     r = IndexReader.open(fsDirDest2, true);
     assertEquals(50, r.maxDoc());
Index: lucene/src/java/org/apache/lucene/index/CompoundFileReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/CompoundFileReader.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/CompoundFileReader.java	(working copy)
@@ -1,307 +0,0 @@
-package org.apache.lucene.index;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.BufferedIndexInput;
-import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.Lock;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-/**
- * Class for accessing a compound stream.
- * This class implements a directory, but is limited to only read operations.
- * Directory methods that would normally modify data throw an exception.
- * @lucene.experimental
- */
-public class CompoundFileReader extends Directory {
-  
-  private int readBufferSize;
-  
-  private static final class FileEntry {
-    long offset;
-    long length;
-  }
-  
-  // Base info
-  private Directory directory;
-  private String fileName;
-  
-  private IndexInput stream;
-  private HashMap<String,FileEntry> entries = new HashMap<String,FileEntry>();
-  
-  public CompoundFileReader(Directory dir, String name) throws IOException {
-    this(dir, name, BufferedIndexInput.BUFFER_SIZE);
-  }
-  
-  public CompoundFileReader(Directory dir, String name, int readBufferSize) throws IOException {
-    assert !(dir instanceof CompoundFileReader) : "compound file inside of compound file: " + name;
-    directory = dir;
-    fileName = name;
-    this.readBufferSize = readBufferSize;
-    
-    boolean success = false;
-    
-    try {
-      stream = dir.openInput(name, readBufferSize);
-      
-      // read the first VInt. If it is negative, it's the version number
-      // otherwise it's the count (pre-3.1 indexes)
-      int firstInt = stream.readVInt();
-      
-      final int count;
-      final boolean stripSegmentName;
-      if (firstInt < CompoundFileWriter.FORMAT_PRE_VERSION) {
-        if (firstInt < CompoundFileWriter.FORMAT_CURRENT) {
-          throw new CorruptIndexException("Incompatible format version: "
-              + firstInt + " expected " + CompoundFileWriter.FORMAT_CURRENT);
-        }
-        // It's a post-3.1 index, read the count.
-        count = stream.readVInt();
-        stripSegmentName = false;
-      } else {
-        count = firstInt;
-        stripSegmentName = true;
-      }
-      
-      // read the directory and init files
-      FileEntry entry = null;
-      for (int i=0; i<count; i++) {
-        long offset = stream.readLong();
-        String id = stream.readString();
-        
-        if (stripSegmentName) {
-          // Fix the id to not include the segment names. This is relevant for
-          // pre-3.1 indexes.
-          id = IndexFileNames.stripSegmentName(id);
-        }
-        
-        if (entry != null) {
-          // set length of the previous entry
-          entry.length = offset - entry.offset;
-        }
-        
-        entry = new FileEntry();
-        entry.offset = offset;
-        entries.put(id, entry);
-      }
-      
-      // set the length of the final entry
-      if (entry != null) {
-        entry.length = stream.length() - entry.offset;
-      }
-      
-      success = true;
-      
-    } finally {
-      if (!success && (stream != null)) {
-        try {
-          stream.close();
-        } catch (IOException e) { }
-      }
-    }
-  }
-  
-  public Directory getDirectory() {
-    return directory;
-  }
-  
-  public String getName() {
-    return fileName;
-  }
-  
-  @Override
-  public synchronized void close() throws IOException {
-    if (stream == null)
-      throw new IOException("Already closed");
-    
-    entries.clear();
-    stream.close();
-    stream = null;
-  }
-  
-  @Override
-  public synchronized IndexInput openInput(String id) throws IOException {
-    // Default to readBufferSize passed in when we were opened
-    return openInput(id, readBufferSize);
-  }
-  
-  @Override
-  public synchronized IndexInput openInput(String id, int readBufferSize) throws IOException {
-    if (stream == null)
-      throw new IOException("Stream closed");
-    
-    id = IndexFileNames.stripSegmentName(id);
-    final FileEntry entry = entries.get(id);
-    if (entry == null)
-      throw new IOException("No sub-file with id " + id + " found (files: " + entries.keySet() + ")");
-    
-    return new CSIndexInput(stream, entry.offset, entry.length, readBufferSize);
-  }
-  
-  /** Returns an array of strings, one for each file in the directory. */
-  @Override
-  public String[] listAll() {
-    String[] res = entries.keySet().toArray(new String[entries.size()]);
-    // Add the segment name
-    String seg = fileName.substring(0, fileName.indexOf('.'));
-    for (int i = 0; i < res.length; i++) {
-      res[i] = seg + res[i];
-    }
-    return res;
-  }
-  
-  /** Returns true iff a file with the given name exists. */
-  @Override
-  public boolean fileExists(String name) {
-    return entries.containsKey(IndexFileNames.stripSegmentName(name));
-  }
-  
-  /** Returns the time the compound file was last modified. */
-  @Override
-  public long fileModified(String name) throws IOException {
-    return directory.fileModified(fileName);
-  }
-  
-  /** Not implemented
-   * @throws UnsupportedOperationException */
-  @Override
-  public void deleteFile(String name) {
-    throw new UnsupportedOperationException();
-  }
-  
-  /** Not implemented
-   * @throws UnsupportedOperationException */
-  public void renameFile(String from, String to) {
-    throw new UnsupportedOperationException();
-  }
-  
-  /** Returns the length of a file in the directory.
-   * @throws IOException if the file does not exist */
-  @Override
-  public long fileLength(String name) throws IOException {
-    FileEntry e = entries.get(IndexFileNames.stripSegmentName(name));
-    if (e == null)
-      throw new FileNotFoundException(name);
-    return e.length;
-  }
-  
-  /** Not implemented
-   * @throws UnsupportedOperationException */
-  @Override
-  public IndexOutput createOutput(String name) {
-    throw new UnsupportedOperationException();
-  }
-  
-  @Override
-  public void sync(Collection<String> names) throws IOException {
-  }
-  
-  /** Not implemented
-   * @throws UnsupportedOperationException */
-  @Override
-  public Lock makeLock(String name) {
-    throw new UnsupportedOperationException();
-  }
-  
-  /** Implementation of an IndexInput that reads from a portion of the
-   *  compound file. The visibility is left as "package" *only* because
-   *  this helps with testing since JUnit test cases in a different class
-   *  can then access package fields of this class.
-   */
-  static final class CSIndexInput extends BufferedIndexInput {
-    IndexInput base;
-    long fileOffset;
-    long length;
-    
-    CSIndexInput(final IndexInput base, final long fileOffset, final long length) {
-      this(base, fileOffset, length, BufferedIndexInput.BUFFER_SIZE);
-    }
-    
-    CSIndexInput(final IndexInput base, final long fileOffset, final long length, int readBufferSize) {
-      super(readBufferSize);
-      this.base = (IndexInput)base.clone();
-      this.fileOffset = fileOffset;
-      this.length = length;
-    }
-    
-    @Override
-    public Object clone() {
-      CSIndexInput clone = (CSIndexInput)super.clone();
-      clone.base = (IndexInput)base.clone();
-      clone.fileOffset = fileOffset;
-      clone.length = length;
-      return clone;
-    }
-    
-    /** Expert: implements buffer refill.  Reads bytes from the current
-     *  position in the input.
-     * @param b the array to read bytes into
-     * @param offset the offset in the array to start storing bytes
-     * @param len the number of bytes to read
-     */
-    @Override
-    protected void readInternal(byte[] b, int offset, int len) throws IOException {
-      long start = getFilePointer();
-      if(start + len > length)
-        throw new IOException("read past EOF");
-      base.seek(fileOffset + start);
-      base.readBytes(b, offset, len, false);
-    }
-    
-    /** Expert: implements seek.  Sets current position in this file, where
-     *  the next {@link #readInternal(byte[],int,int)} will occur.
-     * @see #readInternal(byte[],int,int)
-     */
-    @Override
-    protected void seekInternal(long pos) {}
-    
-    /** Closes the stream to further operations. */
-    @Override
-    public void close() throws IOException {
-      base.close();
-    }
-    
-    @Override
-    public long length() {
-      return length;
-    }
-    
-    @Override
-    public void copyBytes(IndexOutput out, long numBytes) throws IOException {
-      // Copy first whatever is in the buffer
-      numBytes -= flushBuffer(out, numBytes);
-      
-      // If there are more bytes left to copy, delegate the copy task to the
-      // base IndexInput, in case it can do an optimized copy.
-      if (numBytes > 0) {
-        long start = getFilePointer();
-        if (start + numBytes > length) {
-          throw new IOException("read past EOF");
-        }
-        base.seek(fileOffset + start);
-        base.copyBytes(out, numBytes);
-      }
-    }
-  }
-}
Index: lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/CompoundFileWriter.java	(working copy)
@@ -18,9 +18,13 @@
  */
 
 import java.io.IOException;
+import java.util.Collection;
 import java.util.HashSet;
 import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
 
+import org.apache.lucene.index.MergePolicy.MergeAbortedException;
 import org.apache.lucene.index.codecs.MergeState;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
@@ -50,38 +54,44 @@
  * @lucene.internal
  */
 public final class CompoundFileWriter {
-
+  
     private static final class FileEntry {
         /** source file */
         String file;
-
-        /** temporary holder for the start of directory entry for this file */
-        long directoryOffset;
-
+        long length;
         /** temporary holder for the start of this file's data section */
-        long dataOffset;
-        
+        long offset;
         /** the directory which contains the file. */
         Directory dir;
     }
 
     // Before versioning started.
-    static final int FORMAT_PRE_VERSION = 0;
+    /** @lucene.internal */
+    public static final int FORMAT_PRE_VERSION = 0;
     
     // Segment name is not written in the file names.
     static final int FORMAT_NO_SEGMENT_PREFIX = -1;
+    static final int FORMAT_APPEND_FILES = -2;
+    
+    public static final int ENTRY_FORMAT_CURRENT = -1; 
 
     // NOTE: if you introduce a new format, make it 1 lower
     // than the current one, and always change this if you
     // switch to a new format!
-    static final int FORMAT_CURRENT = FORMAT_NO_SEGMENT_PREFIX;
+    /** @lucene.internal */
+    public static final int FORMAT_CURRENT = FORMAT_APPEND_FILES;
 
-    private Directory directory;
-    private String fileName;
-    private HashSet<String> ids;
-    private LinkedList<FileEntry> entries;
+    private final Directory directory;
+    private final Set<String> ids = new HashSet<String>();
+    private final List<FileEntry> separateEntries = new LinkedList<FileEntry>();
+    private final List<FileEntry> entries = new LinkedList<FileEntry>();
+
     private boolean merged = false;
     private MergeState.CheckAbort checkAbort;
+    private IndexOutput dataOut;
+    private IndexOutput currentOutput;
+    private final String entryTableName;
+    private final String dataFileName;
 
     /** Create the compound stream in the specified file. The file name is the
      *  entire name (no extensions are added).
@@ -98,9 +108,8 @@
             throw new NullPointerException("name cannot be null");
         this.checkAbort = checkAbort;
         directory = dir;
-        fileName = name;
-        ids = new HashSet<String>();
-        entries = new LinkedList<FileEntry>();
+        entryTableName = IndexFileNames.segmentFileName(IndexFileNames.stripExtension(name), "", IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION);
+        dataFileName = name;
     }
 
     /** Returns the directory of the compound file. */
@@ -110,7 +119,7 @@
 
     /** Returns the name of the compound file. */
     public String getName() {
-        return fileName;
+        return dataFileName;
     }
 
     /** Add a source stream. <code>file</code> is the string by which the 
@@ -130,22 +139,15 @@
      * external {@link Directory}.
      */
     public void addFile(String file, Directory dir) {
-        if (merged)
-            throw new IllegalStateException(
-                "Can't add extensions after merge has been called");
-
-        if (file == null)
-            throw new NullPointerException(
-                "file cannot be null");
-
+        ensureOpen();
+        assert file != null : "file should not be null";
         if (! ids.add(file))
             throw new IllegalArgumentException(
                 "File " + file + " already added");
-
         FileEntry entry = new FileEntry();
         entry.file = file;
         entry.dir = dir;
-        entries.add(entry);
+        separateEntries.add(entry);
     }
 
     /** Merge files with the extensions added up to now.
@@ -158,95 +160,218 @@
         if (merged)
             throw new IllegalStateException("Merge already performed");
 
-        if (entries.isEmpty())
+        if (entries.isEmpty() && separateEntries.isEmpty())
             throw new IllegalStateException("No entries to merge have been defined");
-
+        
         merged = true;
 
         // open the compound stream
-        IndexOutput os = directory.createOutput(fileName);
         IOException priorException = null;
+        IndexOutput entryTableOut = null;
         try {
-            // Write the Version info - must be a VInt because CFR reads a VInt
-            // in older versions!
-            os.writeVInt(FORMAT_CURRENT);
-            
-            // Write the number of entries
-            os.writeVInt(entries.size());
-
+            initDataOut();
+            assert dataOut != null;
             // Write the directory with all offsets at 0.
             // Remember the positions of directory entries so that we can
             // adjust the offsets later
             long totalSize = 0;
-            for (FileEntry fe : entries) {
-                fe.directoryOffset = os.getFilePointer();
-                os.writeLong(0);    // for now
-                os.writeString(IndexFileNames.stripSegmentName(fe.file));
-                totalSize += fe.dir.fileLength(fe.file);
+            for (FileEntry fe : separateEntries) {
+                fe.length = fe.dir.fileLength(fe.file);
+                totalSize += fe.length; 
             }
-
             // Pre-allocate size of file as optimization --
             // this can potentially help IO performance as
             // we write the file and also later during
             // searching.  It also uncovers a disk-full
             // situation earlier and hopefully without
             // actually filling disk to 100%:
-            final long finalLength = totalSize+os.getFilePointer();
-            os.setLength(finalLength);
-
+            final long finalLength = totalSize+dataOut.getFilePointer();
+            dataOut.setLength(finalLength);
             // Open the files and copy their data into the stream.
-            // Remember the locations of each file's data section.
-            for (FileEntry fe : entries) {
-                fe.dataOffset = os.getFilePointer();
-                copyFile(fe, os);
-            }
-
-            // Write the data offsets into the directory of the compound stream
-            for (FileEntry fe : entries) {
-                os.seek(fe.directoryOffset);
-                os.writeLong(fe.dataOffset);
-            }
-
-            assert finalLength == os.length();
-
+            copyRemainingFiles(separateEntries, dataOut);
+            entries.addAll(separateEntries);
+           
+            assert assertFileLength(finalLength, dataOut);
             // Close the output stream. Set the os to null before trying to
             // close so that if an exception occurs during the close, the
             // finally clause below will not attempt to close the stream
             // the second time.
-            IndexOutput tmp = os;
-            os = null;
-            tmp.close();
+            
+            entryTableOut = directory.createOutput(entryTableName);
+            
+            writeEntryTable(entries, entryTableOut);
         } catch (IOException e) {
           priorException = e;
         } finally {
-          IOUtils.closeSafely(priorException, os);
+          IOUtils.closeSafely(priorException, dataOut, entryTableOut);
         }
     }
-
+    
+  private static boolean assertFileLength(long expected, IndexOutput out) throws IOException {
+    out.flush();
+    assert expected == out.length() : "expected: " + expected + " was " +  out.length();
+    return true;
+  }
+    
+  private final void ensureOpen() {
+    if (merged) {
+      throw new IllegalStateException("Merge already performed");
+    }
+  }
+    
+  protected void copyRemainingFiles(Collection<FileEntry> entries,
+      IndexOutput dataOut) throws IOException {
+    for (FileEntry fileEntry : entries) {
+      copySeparateFile(dataOut, fileEntry);
+    }
+  }
   /**
    * Copy the contents of the file with specified extension into the provided
    * output stream.
    */
-  private void copyFile(FileEntry source, IndexOutput os) throws IOException {
-    IndexInput is = source.dir.openInput(source.file);
+  private final long copySeparateFile(IndexOutput dataOut, FileEntry fileEntry)
+      throws IOException, MergeAbortedException {
+    final IndexInput is = fileEntry.dir.openInput(fileEntry.file);
     try {
-      long startPtr = os.getFilePointer();
-      long length = is.length();
-      os.copyBytes(is, length);
-
+      final long startPtr = dataOut.getFilePointer();
+      final long length = fileEntry.length;
+      dataOut.copyBytes(is, length);
       if (checkAbort != null) {
         checkAbort.work(length);
       }
 
       // Verify that the output length diff is equal to original file
-      long endPtr = os.getFilePointer();
+      long endPtr = dataOut.getFilePointer();
       long diff = endPtr - startPtr;
       if (diff != length)
         throw new IOException("Difference in the output file offsets " + diff
             + " does not match the original file length " + length);
-
+      fileEntry.offset = startPtr;
+      return length;
     } finally {
       is.close();
     }
+    
   }
+  
+  protected void writeEntryTable(Collection<FileEntry> entries, IndexOutput entryOut ) throws IOException {
+    entryOut.writeInt(ENTRY_FORMAT_CURRENT);
+    entryOut.writeVInt(entries.size());
+    for (FileEntry fe : entries) {
+      entryOut.writeString(IndexFileNames.stripSegmentName(fe.file));
+      entryOut.writeLong(fe.offset);
+      entryOut.writeLong(fe.length);
+    }
+  }
+  
+  public IndexOutput createOutput(String name) throws IOException {
+    ensureOpen();
+    boolean success = false;
+    try {
+      assert name != null : "name must not be null";
+      if (!ids.add(name)) {
+        throw new IllegalArgumentException("File " + name + " already exists");
+      }
+      final FileEntry entry = new FileEntry();
+      entry.file = name;
+      if (currentOutput == null) {
+        initDataOut();
+        entries.add(entry);
+        success = true; // keep dataOut open in the finally clause below
+        return currentOutput = new ForwardingIndexOutput(dataOut, entry);
+      } else {
+        separateEntries.add(entry);
+        entry.dir = this.directory;
+        success = true; // keep dataOut open in the finally clause below
+        return directory.createOutput(name);
+      }
+    } finally {
+      if (!success) IOUtils.closeSafely(true, dataOut);
+    }
+  }
+  
+  private final void initDataOut() throws IOException {
+    if (dataOut == null) {
+      boolean success = false;
+      try {
+        dataOut = directory.createOutput(dataFileName);
+        dataOut.writeVInt(FORMAT_CURRENT);
+        success = true;
+      } finally {
+        if (!success) {
+          IOUtils.closeSafely(true, dataOut);
+        }
+      }
+    }
+  }
+  
+  private final class ForwardingIndexOutput extends IndexOutput {
+    private final IndexOutput delegate;
+    private final long offset;
+    private boolean closed;
+    private FileEntry entry;
+    private long writtenBytes;
+    
+    
+    ForwardingIndexOutput(IndexOutput delegate, FileEntry entry) {
+      super();
+      this.delegate = delegate;
+      this.entry = entry;
+      entry.offset = offset = delegate.getFilePointer();
+      assert assertLength(offset);
+    }
+    
+    private boolean assertLength(long currentLength) {
+      List<FileEntry> entries = CompoundFileWriter.this.entries;
+      // this entry is already the last element; check the one before it, if any
+      FileEntry previous = entries.size() < 2 ? null : entries.get(entries.size() - 2);
+      assert previous == null || previous.offset + previous.length == currentLength;
+      return true;
+    }
+
+    @Override
+    public void flush() throws IOException {
+      delegate.flush();
+    }
+
+    @Override
+    public void close() throws IOException {
+      if (!closed) {
+        closed = true;
+        entry.length = writtenBytes;
+        CompoundFileWriter.this.currentOutput = null;
+      }
+    }
+
+    @Override
+    public long getFilePointer() {
+      return delegate.getFilePointer() - offset;
+    }
+
+    @Override
+    public void seek(long pos) throws IOException {
+      assert !closed;
+      delegate.seek(offset + pos);
+    }
+
+    @Override
+    public long length() throws IOException {
+      assert !closed;
+      return delegate.length() - offset;
+    }
+
+    @Override
+    public void writeByte(byte b) throws IOException {
+      assert !closed;
+      writtenBytes++;
+      delegate.writeByte(b);
+    }
+
+    @Override
+    public void writeBytes(byte[] b, int offset, int length) throws IOException {
+      assert !closed;
+      writtenBytes += length;
+      delegate.writeBytes(b, offset, length);
+    }
+  }
+ 
 }
Index: lucene/src/java/org/apache/lucene/index/IndexFileNames.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/IndexFileNames.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/IndexFileNames.java	(working copy)
@@ -68,6 +68,9 @@
 
   /** Extension of compound file */
   public static final String COMPOUND_FILE_EXTENSION = "cfs";
+  
+  /** Extension of compound file entries */
+  public static final String COMPOUND_FILE_ENTRIES_EXTENSION = "cfe";
 
   /** Extension of compound file for doc store files*/
   public static final String COMPOUND_FILE_STORE_EXTENSION = "cfx";
@@ -93,6 +96,7 @@
    */
   public static final String INDEX_EXTENSIONS[] = new String[] {
     COMPOUND_FILE_EXTENSION,
+    COMPOUND_FILE_ENTRIES_EXTENSION,
     FIELD_INFOS_EXTENSION,
     FIELDS_INDEX_EXTENSION,
     FIELDS_EXTENSION,
@@ -244,7 +248,15 @@
     }
     return filename;
   }
-
+  
+  public static String stripExtension(String filename) {
+    int idx = filename.indexOf('.');
+    if (idx != -1) {
+      filename = filename.substring(0, idx);
+    }
+    return filename;
+  }
+  
   /**
    * Returns true if the given filename ends with the separate norms file
    * pattern: {@code SEPARATE_NORMS_EXTENSION + "[0-9]+"}.
Index: lucene/src/java/org/apache/lucene/index/IndexReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/IndexReader.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/IndexReader.java	(working copy)
@@ -26,6 +26,7 @@
 import org.apache.lucene.index.codecs.PerDocValues;
 import org.apache.lucene.index.values.IndexDocValues;
 import org.apache.lucene.store.*;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -1432,14 +1433,14 @@
     }
 
     Directory dir = null;
-    CompoundFileReader cfr = null;
+    CompoundFileDirectory cfr = null;
 
     try {
       File file = new File(filename);
       String dirname = file.getAbsoluteFile().getParent();
       filename = file.getName();
       dir = FSDirectory.open(new File(dirname));
-      cfr = new CompoundFileReader(dir, filename);
+      cfr = dir.openCompoundInput(filename, BufferedIndexInput.BUFFER_SIZE, OpenMode.READ);
 
       String [] files = cfr.listAll();
       ArrayUtil.mergeSort(files);   // sort the array of filename so that the output is more readable
Index: lucene/src/java/org/apache/lucene/index/IndexWriter.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/IndexWriter.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/IndexWriter.java	(working copy)
@@ -3510,6 +3510,8 @@
 
             synchronized(this) {
               deleter.deleteFile(compoundFileName);
+              
+              deleter.deleteFile(IndexFileNames.segmentFileName(mergedName, "", IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
               deleter.deleteNewFiles(merge.info.files());
             }
           }
Index: lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java	(working copy)
@@ -23,6 +23,8 @@
 import org.apache.lucene.index.codecs.Codec;
 import org.apache.lucene.index.codecs.FieldsProducer;
 import org.apache.lucene.index.codecs.PerDocValues;
+import org.apache.lucene.store.CompoundFileDirectory;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.store.Directory;
 
 /** Holds core readers that are shared (unchanged) when
@@ -52,8 +54,8 @@
   
   FieldsReader fieldsReaderOrig;
   TermVectorsReader termVectorsReaderOrig;
-  CompoundFileReader cfsReader;
-  CompoundFileReader storeCFSReader;
+  CompoundFileDirectory cfsReader;
+  CompoundFileDirectory storeCFSReader;
 
   
   
@@ -73,7 +75,7 @@
     try {
       Directory dir0 = dir;
       if (si.getUseCompoundFile()) {
-        cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+        cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize, OpenMode.READ);
         dir0 = cfsReader;
       }
       cfsDir = dir0;
@@ -161,9 +163,9 @@
       if (si.getDocStoreOffset() != -1) {
         if (si.getDocStoreIsCompoundFile()) {
           assert storeCFSReader == null;
-          storeCFSReader = new CompoundFileReader(dir,
+          storeCFSReader = dir.openCompoundInput(
               IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
-              readBufferSize);
+              readBufferSize, OpenMode.READ);
           storeDir = storeCFSReader;
           assert storeDir != null;
         } else {
@@ -175,7 +177,7 @@
         // was not used, but then we are asked to open doc
         // stores after the segment has switched to CFS
         if (cfsReader == null) {
-          cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+          cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize, OpenMode.READ);
         }
         storeDir = cfsReader;
         assert storeDir != null;
Index: lucene/src/java/org/apache/lucene/index/SegmentInfo.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SegmentInfo.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/SegmentInfo.java	(working copy)
@@ -30,6 +30,8 @@
 import org.apache.lucene.index.codecs.Codec;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.codecs.DefaultSegmentInfosWriter;
+import org.apache.lucene.store.BufferedIndexInput;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
@@ -245,7 +247,7 @@
       }
       final Directory dirToTest;
       if (isCompoundFile) {
-        dirToTest = new CompoundFileReader(dir, IndexFileNames.segmentFileName(storesSegment, "", ext));
+        dirToTest = dir.openCompoundInput(IndexFileNames.segmentFileName(storesSegment, "", ext), BufferedIndexInput.BUFFER_SIZE, OpenMode.READ);
       } else {
         dirToTest = dir;
       }
@@ -263,8 +265,8 @@
     if (fieldInfos == null) {
       Directory dir0 = dir;
       if (isCompoundFile && checkCompoundFile) {
-        dir0 = new CompoundFileReader(dir, IndexFileNames.segmentFileName(name,
-            "", IndexFileNames.COMPOUND_FILE_EXTENSION));
+        dir0 = dir.openCompoundInput(IndexFileNames.segmentFileName(name,
+            "", IndexFileNames.COMPOUND_FILE_EXTENSION), BufferedIndexInput.BUFFER_SIZE, OpenMode.READ);
       }
       try {
         fieldInfos = new FieldInfos(dir0, IndexFileNames.segmentFileName(name,
@@ -454,7 +456,7 @@
     this.isCompoundFile = isCompoundFile;
     clearFilesCache();
   }
-
+  
   /**
    * Returns true if this segment is stored as a compound
    * file; else, false.
@@ -617,6 +619,11 @@
 
     if (useCompoundFile) {
       fileSet.add(IndexFileNames.segmentFileName(name, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
+      //nocommit - add reasonable version check here
+      if (version != null && "4.0".equals(version)) {
+        fileSet.add(IndexFileNames.segmentFileName(name, "",
+            IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
+      }
     } else {
       for(String ext : IndexFileNames.NON_STORE_INDEX_EXTENSIONS) {
         addIfExists(fileSet, IndexFileNames.segmentFileName(name, "", ext));
Index: lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java	(working copy)
@@ -19,7 +19,6 @@
 
 import java.io.IOException;
 
-import org.apache.lucene.index.CompoundFileReader;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.FieldsReader;
 import org.apache.lucene.index.IndexFileNames;
@@ -30,6 +29,7 @@
 import org.apache.lucene.store.ChecksumIndexInput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 
 /**
  * Default implementation of {@link SegmentInfosReader}.
@@ -68,13 +68,13 @@
           Directory dir = directory;
           if (si.getDocStoreOffset() != -1) {
             if (si.getDocStoreIsCompoundFile()) {
-              dir = new CompoundFileReader(dir, IndexFileNames.segmentFileName(
+              dir = dir.openCompoundInput(IndexFileNames.segmentFileName(
                   si.getDocStoreSegment(), "",
-                  IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), 1024);
+                  IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), 1024, OpenMode.READ);
             }
           } else if (si.getUseCompoundFile()) {
-            dir = new CompoundFileReader(dir, IndexFileNames.segmentFileName(
-                si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), 1024);
+            dir = dir.openCompoundInput(IndexFileNames.segmentFileName(
+                si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), 1024, OpenMode.READ);
           }
 
           try {
Index: lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java	(working copy)
@@ -25,7 +25,6 @@
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.lucene.index.CompoundFileReader;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo;
@@ -37,8 +36,10 @@
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.codecs.FieldsProducer;
+import org.apache.lucene.store.CompoundFileDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.UnicodeUtil;
@@ -177,8 +178,8 @@
         // terms reader with index, the segment has switched
         // to CFS
 
-        if (!(dir instanceof CompoundFileReader)) {
-          dir0 = cfsReader = new CompoundFileReader(dir, IndexFileNames.segmentFileName(si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+        if (!(dir instanceof CompoundFileDirectory)) {
+          dir0 = cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize, OpenMode.READ);
         } else {
           dir0 = dir;
         }
Index: lucene/src/java/org/apache/lucene/store/Directory.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/Directory.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/store/Directory.java	(working copy)
@@ -22,6 +22,7 @@
 import java.io.Closeable;
 import java.util.Collection; // for javadocs
 
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.util.IOUtils;
 
 /** A Directory is a flat list of files.  Files may be written once, when they
@@ -117,6 +118,18 @@
   public IndexInput openInput(String name, int bufferSize) throws IOException {
     return openInput(name);
   }
+  
+  /** 
+   * Returns a {@link CompoundFileDirectory} capable of
+   * reading the Lucene compound file format.  
+   * <p>
+   * The default implementation returns 
+   * {@link DefaultCompoundFileDirectory}.
+   * @lucene.experimental
+   */
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    return new DefaultCompoundFileDirectory(this, name, bufferSize, mode);
+  }
 
   /** Construct a {@link Lock}.
    * @param name the name of the lock file
Index: lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java	(working copy)
@@ -25,6 +25,8 @@
 import java.util.Set;
 import java.util.HashSet;
 
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
+
 /**
  * Expert: A Directory instance that switches files between
  * two other Directory instances.
@@ -148,4 +150,9 @@
   public IndexInput openInput(String name) throws IOException {
     return getDirectory(name).openInput(name);
   }
+
+  @Override
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    return getDirectory(name).openCompoundInput(name, bufferSize, mode);
+  }
 }
Index: lucene/src/java/org/apache/lucene/store/MMapDirectory.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/MMapDirectory.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/store/MMapDirectory.java	(working copy)
@@ -31,7 +31,9 @@
 import java.security.PrivilegedActionException;
 import java.lang.reflect.Method;
 
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.IOUtils;
 
 /** File-based {@link Directory} implementation that uses
  *  mmap for reading, and {@link
@@ -213,12 +215,54 @@
     File f = new File(getDirectory(), name);
     RandomAccessFile raf = new RandomAccessFile(f, "r");
     try {
-      return new MMapIndexInput(raf, chunkSizePower);
+      return new MMapIndexInput(raf, 0, raf.length(), chunkSizePower);
     } finally {
       raf.close();
     }
   }
+  
+  @Override
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    return new MMapCompoundFileDirectory(this, name, bufferSize, mode);
+  }
+  
+  private final class MMapCompoundFileDirectory extends CompoundFileDirectory {
+    private RandomAccessFile raf = null;
 
+    public MMapCompoundFileDirectory(Directory directory, String fileName, int readBufferSize, OpenMode mode) throws IOException {
+      super(directory, fileName, readBufferSize, mode);
+      if (mode == OpenMode.READ) {
+        IndexInput stream = null;
+        try {
+          File f = new File(MMapDirectory.this.getDirectory(), fileName);
+          raf = new RandomAccessFile(f, "r");
+          stream = new MMapIndexInput(raf, 0, raf.length(), chunkSizePower);
+          init(CompoundFileDirectory.readEntries(stream, directory, fileName));
+          stream.close();
+        } catch (IOException e) {
+          // close partially-opened resources, then rethrow the original exception
+          IOUtils.closeSafely(e, raf, stream);
+        }
+      } else {
+        init(null);
+      }
+    }
+
+    @Override
+    public IndexInput openInputSlice(String id, long offset, long length, int readBufferSize) throws IOException {
+      return new MMapIndexInput(raf, offset, length, chunkSizePower);
+    }
+
+    @Override
+    public synchronized void close() throws IOException {
+      try {
+        raf.close();
+      } finally {
+        super.close();
+      }
+    }
+  }
+
   // Because Java's ByteBuffer uses an int to address the
   // values, it's necessary to access a file >
   // Integer.MAX_VALUE in size using multiple byte buffers.
@@ -235,8 +279,8 @@
   
     private boolean isClone = false;
     
-    MMapIndexInput(RandomAccessFile raf, int chunkSizePower) throws IOException {
-      this.length = raf.length();
+    MMapIndexInput(RandomAccessFile raf, long offset, long length, int chunkSizePower) throws IOException {
+      this.length = length;
       this.chunkSizePower = chunkSizePower;
       this.chunkSize = 1L << chunkSizePower;
       this.chunkSizeMask = chunkSize - 1L;
@@ -261,7 +305,7 @@
           ? chunkSize
           : (length - bufferStart)
         );
-        this.buffers[bufNr] = rafc.map(MapMode.READ_ONLY, bufferStart, bufSize);
+        this.buffers[bufNr] = rafc.map(MapMode.READ_ONLY, offset + bufferStart, bufSize);
         bufferStart += bufSize;
       }
       seek(0L);
Index: lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java	(working copy)
@@ -24,6 +24,10 @@
 import java.nio.channels.FileChannel;
 import java.util.concurrent.Future; // javadoc
 
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
+import org.apache.lucene.store.SimpleFSDirectory.SimpleFSIndexInput;
+import org.apache.lucene.util.IOUtils;
+
 /**
  * An {@link FSDirectory} implementation that uses java.nio's FileChannel's
  * positional read, which allows multiple threads to read from the same file
@@ -77,7 +81,52 @@
     ensureOpen();
     return new NIOFSIndexInput(new File(getDirectory(), name), bufferSize, getReadChunkSize());
   }
+  
+  @Override
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    return new NIOFSCompoundFileDirectory(this, name, bufferSize, mode);
+  }
 
+  private final class NIOFSCompoundFileDirectory extends CompoundFileDirectory {
+    private SimpleFSIndexInput.Descriptor fd;
+    private FileChannel fc;
+
+    public NIOFSCompoundFileDirectory(Directory directory, String fileName, int readBufferSize, OpenMode mode) throws IOException {
+      super(directory, fileName, readBufferSize, mode);
+      if (mode == OpenMode.READ) {
+        IndexInput stream = null;
+        try {
+          File f = new File(NIOFSDirectory.this.getDirectory(), fileName);
+          fd = new SimpleFSIndexInput.Descriptor(f, "r");
+          fc = fd.getChannel();
+          stream = new NIOFSIndexInput(fd, fc, 0, fd.length, readBufferSize,
+              getReadChunkSize());
+          init(CompoundFileDirectory.readEntries(stream, directory, fileName));
+          stream.close();
+        } catch (IOException e) {
+          // close partially-opened resources, then rethrow the original exception
+          IOUtils.closeSafely(e, fc, fd, stream);
+        }
+      } else {
+        init(null);
+      }
+    }
+    
+    @Override
+    public IndexInput openInputSlice(String id, long offset, long length, int readBufferSize) throws IOException {
+      return new NIOFSIndexInput(fd, fc, offset, length, readBufferSize, getReadChunkSize());
+    }
+
+    @Override
+    public synchronized void close() throws IOException {
+      try {
+        IOUtils.closeSafely(false, fc, fd);
+      } finally {
+        super.close();
+      }
+    }
+  }
+
   protected static class NIOFSIndexInput extends SimpleFSDirectory.SimpleFSIndexInput {
 
     private ByteBuffer byteBuf; // wraps the buffer for NIO
@@ -91,6 +140,12 @@
       super(path, bufferSize, chunkSize);
       channel = file.getChannel();
     }
+    
+    public NIOFSIndexInput(Descriptor file, FileChannel fc, long off, long length, int bufferSize, int chunkSize) throws IOException {
+      super(file, off, length, bufferSize, chunkSize);
+      channel = fc;
+      isClone = true;
+    }
 
     @Override
     protected void newBuffer(byte[] newBuffer) {
@@ -145,7 +200,11 @@
       int readLength = bb.limit() - readOffset;
       assert readLength == len;
 
-      long pos = getFilePointer();
+      long pos = getFilePointer() + off;
+      
+      if (pos + len > end) {
+        throw new IOException("read past EOF");
+      }
 
       try {
         while (readLength > 0) {
@@ -159,9 +218,6 @@
           }
           bb.limit(limit);
           int i = channel.read(bb, pos);
-          if (i == -1) {
-            throw new IOException("read past EOF");
-          }
           pos += i;
           readOffset += i;
           readLength -= i;
Index: lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java	(revision 1137007)
+++ lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java	(working copy)
@@ -21,6 +21,9 @@
 import java.io.IOException;
 import java.io.RandomAccessFile;
 
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
+import org.apache.lucene.util.IOUtils;
+
 /** A straightforward implementation of {@link FSDirectory}
  *  using java.io.RandomAccessFile.  However, this class has
  *  poor concurrent performance (multiple threads will
@@ -55,7 +58,49 @@
     ensureOpen();
     return new SimpleFSIndexInput(new File(directory, name), bufferSize, getReadChunkSize());
   }
+  
+  @Override
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    return new SimpleFSCompoundFileDirectory(this, name, bufferSize, mode);
+  }
 
+  private final class SimpleFSCompoundFileDirectory extends CompoundFileDirectory {
+    private SimpleFSIndexInput.Descriptor fd;
+
+    public SimpleFSCompoundFileDirectory(Directory directory, String fileName, int readBufferSize, OpenMode mode) throws IOException {
+      super(directory, fileName, readBufferSize, mode);
+      IndexInput stream = null;
+      if (mode == OpenMode.READ) {
+        try {
+          final File f = new File(SimpleFSDirectory.this.getDirectory(), fileName);
+          fd = new SimpleFSIndexInput.Descriptor(f, "r");
+          stream = new SimpleFSIndexInput(fd, 0, fd.length, readBufferSize, getReadChunkSize());
+          init(CompoundFileDirectory.readEntries(stream, directory, fileName));
+          stream.close();
+        } catch (IOException e) {
+          // close partially-opened resources, then rethrow the original exception
+          IOUtils.closeSafely(e, fd, stream);
+        }
+      } else {
+        init(null);
+      }
+    }
+
+    @Override
+    public IndexInput openInputSlice(String id, long offset, long length, int readBufferSize) throws IOException {
+      return new SimpleFSIndexInput(fd, offset, length, readBufferSize, getReadChunkSize());
+    }
+
+    @Override
+    public synchronized void close() throws IOException {
+      try {
+        fd.close();
+      } finally {
+        super.close();
+      }
+    }
+  }
+
   protected static class SimpleFSIndexInput extends BufferedIndexInput {
   
     protected static class Descriptor extends RandomAccessFile {
@@ -84,25 +129,42 @@
     boolean isClone;
     //  LUCENE-1566 - maximum read length on a 32bit JVM to prevent incorrect OOM 
     protected final int chunkSize;
+    protected final long off;
+    protected final long end;
     
     public SimpleFSIndexInput(File path, int bufferSize, int chunkSize) throws IOException {
       super(bufferSize);
-      file = new Descriptor(path, "r");
+      this.file = new Descriptor(path, "r"); 
       this.chunkSize = chunkSize;
+      this.off = 0L;
+      this.end = file.length;
     }
+    
+    public SimpleFSIndexInput(Descriptor file, long off, long length, int bufferSize, int chunkSize) throws IOException {
+      super(bufferSize);
+      this.file = file;
+      this.chunkSize = chunkSize;
+      this.off = off;
+      this.end = off + length;
+      this.isClone = true; // shares the caller's Descriptor; the owning compound directory closes it, not this input
+    }
   
     /** IndexInput methods */
     @Override
     protected void readInternal(byte[] b, int offset, int len)
          throws IOException {
       synchronized (file) {
-        long position = getFilePointer();
+        long position = off + getFilePointer();
         if (position != file.position) {
           file.seek(position);
           file.position = position;
         }
         int total = 0;
 
+        if (position + len > end) {
+          throw new IOException("read past EOF");
+        }
+
         try {
           do {
             final int readLength;
@@ -113,9 +175,6 @@
               readLength = chunkSize;
             }
             final int i = file.read(b, offset + total, readLength);
-            if (i == -1) {
-              throw new IOException("read past EOF");
-            }
             file.position += i;
             total += i;
           } while (total < len);
@@ -144,7 +203,7 @@
   
     @Override
     public long length() {
-      return file.length;
+      return end - off;
     }
   
     @Override
Index: lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java	(revision 1137007)
+++ lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java	(working copy)
@@ -33,6 +33,7 @@
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.codecs.CodecProvider;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.ThrottledIndexOutput;
 import org.apache.lucene.util._TestUtil;
@@ -388,7 +389,7 @@
     }
   }
 
-  private void addFileHandle(Closeable c, String name, boolean input) {
+  void addFileHandle(Closeable c, String name, boolean input) {
     Integer v = openFiles.get(name);
     if (v != null) {
       v = Integer.valueOf(v.intValue()+1);
@@ -416,7 +417,13 @@
     addFileHandle(ii, name, true);
     return ii;
   }
-
+  
+  @Override
+  public synchronized CompoundFileDirectory openCompoundInput(String name, int bufferSize, OpenMode mode) throws IOException {
+    maybeYield();
+    return new MockCompoundFileDirectoryWrapper(name, this, delegate.openCompoundInput(name, bufferSize, mode), mode);
+  }
+  
   /** Provided for testing purposes.  Use sizeInBytes() instead. */
   public synchronized final long getRecomputedSizeInBytes() throws IOException {
     if (!(delegate instanceof RAMDirectory))
@@ -481,7 +488,7 @@
     delegate.close();
   }
 
-  private synchronized void removeOpenFile(Closeable c, String name) {
+  synchronized void removeOpenFile(Closeable c, String name) {
     Integer v = openFiles.get(name);
     // Could be null when crash() was called
     if (v != null) {
Index: lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestAddIndexes.java	(revision 1137007)
+++ lucene/src/test/org/apache/lucene/index/TestAddIndexes.java	(working copy)
@@ -1075,8 +1075,8 @@
     IndexWriter w3 = new IndexWriter(dir, conf);
     w3.addIndexes(readers);
     w3.close();
-    // we should now see segments_X, segments.gen,_Y.cfs, _Z.fnx
-    assertEquals("Only one compound segment should exist", 4, dir.listAll().length);
+    // we should now see segments_X, segments.gen, _Y.cfs, _Y.cfe, _Z.fnx
+    assertEquals("Only one compound segment should exist", 5, dir.listAll().length);
   }
   
   // LUCENE-3126: tests that if a non-CFS segment is copied, it is converted to
Index: lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java	(revision 1137007)
+++ lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java	(working copy)
@@ -41,6 +41,7 @@
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.search.SimilarityProvider;
 import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.CompoundFileDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Bits;
@@ -536,7 +537,7 @@
       // figure out which field number corresponds to
       // "content", and then set our expected file names below
       // accordingly:
-      CompoundFileReader cfsReader = new CompoundFileReader(dir, "_0.cfs");
+      CompoundFileDirectory cfsReader = dir.openCompoundInput("_0.cfs", 1024, org.apache.lucene.store.CompoundFileDirectory.OpenMode.READ);
       FieldInfos fieldInfos = new FieldInfos(cfsReader, "_0.fnm");
       int contentFieldIndex = -1;
       for (FieldInfo fi : fieldInfos) {
@@ -549,7 +550,7 @@
       assertTrue("could not locate the 'content' field number in the _2.cfs segment", contentFieldIndex != -1);
 
       // Now verify file names:
-      String[] expected = new String[] {"_0.cfs",
+      String[] expected = new String[] {"_0.cfs", "_0.cfe",
                                "_0_1.del",
                                "_0_1.s" + contentFieldIndex,
                                "segments_2",
Index: lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestCompoundFile.java	(revision 1137007)
+++ lucene/src/test/org/apache/lucene/index/TestCompoundFile.java	(working copy)
@@ -19,15 +19,20 @@
 
 import java.io.IOException;
 import java.io.File;
+import java.util.HashSet;
+import java.util.Set;
 
 import org.apache.lucene.util.LuceneTestCase;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
+
+import org.apache.lucene.store.CompoundFileDirectory;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.SimpleFSDirectory;
 import org.apache.lucene.store._TestHelper;
+import org.apache.lucene.store.CompoundFileDirectory.OpenMode;
 import org.apache.lucene.util._TestUtil;
 
 
@@ -204,7 +209,7 @@
             csw.addFile(name);
             csw.close();
 
-            CompoundFileReader csr = new CompoundFileReader(dir, name + ".cfs");
+            CompoundFileDirectory csr = dir.openCompoundInput(name + ".cfs", 1024, OpenMode.READ);
             IndexInput expected = dir.openInput(name);
             IndexInput actual = csr.openInput(name);
             assertSameStreams(name, expected, actual);
@@ -228,7 +233,7 @@
         csw.addFile("d2");
         csw.close();
 
-        CompoundFileReader csr = new CompoundFileReader(dir, "d.csf");
+        CompoundFileDirectory csr = dir.openCompoundInput("d.csf", 1024, OpenMode.READ);
         IndexInput expected = dir.openInput("d1");
         IndexInput actual = csr.openInput("d1");
         assertSameStreams("d1", expected, actual);
@@ -283,7 +288,7 @@
         }
         csw.close();
 
-        CompoundFileReader csr = new CompoundFileReader(dir, "test.cfs");
+        CompoundFileDirectory csr = dir.openCompoundInput("test.cfs", 1024, OpenMode.READ);
         for (int i=0; i<data.length; i++) {
             IndexInput check = dir.openInput(segment + data[i]);
             IndexInput test = csr.openInput(segment + data[i]);
@@ -350,26 +355,9 @@
         }
     }
 
-
-    static boolean isCSIndexInput(IndexInput is) {
-        return is instanceof CompoundFileReader.CSIndexInput;
-    }
-
-    static boolean isCSIndexInputOpen(IndexInput is) throws IOException {
-        if (isCSIndexInput(is)) {
-            CompoundFileReader.CSIndexInput cis =
-            (CompoundFileReader.CSIndexInput) is;
-
-            return _TestHelper.isSimpleFSIndexInputOpen(cis.base);
-        } else {
-            return false;
-        }
-    }
-
-
     public void testClonedStreamsClosing() throws IOException {
         setUp_2();
-        CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
+        CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024, OpenMode.READ);
 
         // basic clone
         IndexInput expected = dir.openInput("f11");
@@ -379,10 +367,8 @@
         assertTrue(_TestHelper.isSimpleFSIndexInputOpen(expected));
 
         IndexInput one = cr.openInput("f11");
-        assertTrue(isCSIndexInputOpen(one));
 
         IndexInput two = (IndexInput) one.clone();
-        assertTrue(isCSIndexInputOpen(two));
 
         assertSameStreams("basic clone one", expected, one);
         expected.seek(0);
@@ -390,7 +376,6 @@
 
         // Now close the first stream
         one.close();
-        assertTrue("Only close when cr is closed", isCSIndexInputOpen(one));
 
         // The following should really fail since we couldn't expect to
         // access a file once close has been called on it (regardless of
@@ -402,8 +387,6 @@
 
         // Now close the compound reader
         cr.close();
-        assertFalse("Now closed one", isCSIndexInputOpen(one));
-        assertFalse("Now closed two", isCSIndexInputOpen(two));
 
         // The following may also fail since the compound stream is closed
         expected.seek(0);
@@ -426,7 +409,7 @@
      */
     public void testRandomAccess() throws IOException {
         setUp_2();
-        CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
+        CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024, OpenMode.READ);
 
         // Open two files
         IndexInput e1 = dir.openInput("f11");
@@ -505,7 +488,7 @@
      */
     public void testRandomAccessClones() throws IOException {
         setUp_2();
-        CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
+        CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024, OpenMode.READ);
 
         // Open two files
         IndexInput e1 = cr.openInput("f11");
@@ -582,7 +565,7 @@
 
     public void testFileNotFound() throws IOException {
         setUp_2();
-        CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
+        CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024, OpenMode.READ);
 
         // Open two files
         try {
@@ -600,7 +583,7 @@
 
     public void testReadPastEOF() throws IOException {
         setUp_2();
-        CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
+        CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024, OpenMode.READ);
         IndexInput is = cr.openInput("f2");
         is.seek(is.length() - 10);
         byte b[] = new byte[100];
@@ -653,11 +636,11 @@
        createSequenceFile(dir, "d1", (byte) 0, 15);
 
        Directory newDir = newDirectory();
-       CompoundFileWriter csw = new CompoundFileWriter(newDir, "d.csf");
+       CompoundFileDirectory csw = newDir.openCompoundInput("d.csf", 1024, OpenMode.WRITE);
        csw.addFile("d1", dir);
        csw.close();
 
-       CompoundFileReader csr = new CompoundFileReader(newDir, "d.csf");
+       CompoundFileDirectory csr = newDir.openCompoundInput("d.csf", 1024, OpenMode.READ);
        IndexInput expected = dir.openInput("d1");
        IndexInput actual = csr.openInput("d1");
        assertSameStreams("d1", expected, actual);
@@ -668,5 +651,51 @@
        
        newDir.close();
    }
+   
+   
+  public void testAppend() throws IOException {
+    Directory newDir = newDirectory();
+    CompoundFileDirectory csw = newDir.openCompoundInput("d.csf", 1024,
+        OpenMode.WRITE);
+    int size = 5 + random.nextInt(128);
+    for (int j = 0; j < 2; j++) {
+      IndexOutput os = csw.createOutput("seg" + j + "_foo.txt");
+      for (int i = 0; i < size; i++) {
+        os.writeInt(i);
+      }
+      os.close();
+      String[] listAll = newDir.listAll();
+      assertEquals(1, listAll.length);
+      assertEquals("d.csf", listAll[0]);
+    }
+    createSequenceFile(dir, "d1", (byte) 0, 15);
+    csw.addFile("d1", dir);
+    String[] listAll = newDir.listAll();
+    assertEquals(1, listAll.length);
+    assertEquals("d.csf", listAll[0]);
+    csw.close();
+    CompoundFileDirectory csr = newDir.openCompoundInput("d.csf", 1024,
+        OpenMode.READ);
+    for (int j = 0; j < 2; j++) {
+      IndexInput openInput = csr.openInput("seg" + j + "_foo.txt");
+      assertEquals(size * 4, openInput.length());
+      for (int i = 0; i < size; i++) {
+        assertEquals(i, openInput.readInt());
+      }
 
+      openInput.close();
+
+    }
+    IndexInput expected = dir.openInput("d1");
+    IndexInput actual = csr.openInput("d1");
+    assertSameStreams("d1", expected, actual);
+    assertSameSeekBehavior("d1", expected, actual);
+    expected.close();
+    actual.close();
+    csr.close();
+    newDir.close();
+
+  }
+   
+
 }
Index: lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java	(revision 1137007)
+++ lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java	(working copy)
@@ -20,6 +20,7 @@
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.search.DefaultSimilarity;
 import org.apache.lucene.search.Similarity;
+import org.apache.lucene.store.CompoundFileDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
@@ -91,7 +92,7 @@
     // figure out which field number corresponds to
     // "content", and then set our expected file names below
     // accordingly:
-    CompoundFileReader cfsReader = new CompoundFileReader(dir, "_2.cfs");
+    CompoundFileDirectory cfsReader = dir.openCompoundInput("_2.cfs", 1024, org.apache.lucene.store.CompoundFileDirectory.OpenMode.READ);
     FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
     int contentFieldIndex = -1;
     for (FieldInfo fi : fieldInfos) {
