Index: lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java
===================================================================
--- lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java	(revision 1141573)
+++ lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java	(working copy)
@@ -25,6 +25,7 @@
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DocValuesConsumer;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
@@ -58,8 +59,12 @@
   public static String CODEC_NAME = "Appending";
   
   public AppendingCodec() {
-    name = CODEC_NAME;
+    this(new CodecConfig());
   }
+  
+  public AppendingCodec(CodecConfig config) {
+    super(CODEC_NAME, config);
+  }
 
   @Override
   public FieldsConsumer fieldsConsumer(SegmentWriteState state)
@@ -117,7 +122,7 @@
               state.dir, state.fieldInfos, state.segmentInfo.name,
               docsReader,
               state.readBufferSize,
-              StandardCodec.TERMS_CACHE_SIZE,
+              config.getTermCacheSize(),
               state.codecId);
       success = true;
       return ret;
@@ -138,22 +143,22 @@
     StandardPostingsReader.files(dir, segmentInfo, codecId, files);
     BlockTermsReader.files(dir, segmentInfo, codecId, files);
     FixedGapTermsIndexReader.files(dir, segmentInfo, codecId, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files, config.getDocValuesUseCFS());
   }
 
   @Override
   public void getExtensions(Set<String> extensions) {
     StandardCodec.getStandardExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
   
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java	(working copy)
@@ -28,6 +28,7 @@
 import java.util.TreeSet;
 
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
 import org.apache.lucene.index.codecs.PerDocConsumer;
@@ -51,7 +52,7 @@
   private final SegmentCodecs segmentCodecs;
 
   PerFieldCodecWrapper(SegmentCodecs segmentCodecs) {
-    name = "PerField";
+    super("PerField", new CodecConfig());
     this.segmentCodecs = segmentCodecs;
   }
 
Index: lucene/src/java/org/apache/lucene/index/codecs/Codec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/Codec.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/Codec.java	(working copy)
@@ -31,7 +31,13 @@
   public static final Codec[] EMPTY = new Codec[0];
   /** Unique name that's used to retrieve this codec when
    *  reading the index */
-  public String name;
+  public final String name;
+  protected CodecConfig config;
+  
+  protected Codec(String name, CodecConfig config) {
+    this.name = name;
+    this.config = (CodecConfig) config.clone();
+  }
 
   /** Writes a new segment */
   public abstract FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException;
@@ -68,7 +74,7 @@
 
   /** Records all file extensions this codec uses */
   public abstract void getExtensions(Set<String> extensions);
-
+  
   @Override
   public String toString() {
     return name;
Index: lucene/src/java/org/apache/lucene/index/codecs/CodecConfig.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/CodecConfig.java	(revision 0)
+++ lucene/src/java/org/apache/lucene/index/codecs/CodecConfig.java	(revision 0)
@@ -0,0 +1,166 @@
+package org.apache.lucene.index.codecs;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.util.Comparator;
+
+import org.apache.lucene.index.codecs.pulsing.PulsingCodec;
+import org.apache.lucene.util.BytesRef;
+
+/**
+ * Holds all the configuration for a {@link Codec}. You should instantiate this
+ * class, call the setters to set your configuration, then pass it to a
+ * {@link Codec} constructor. Note that {@link Codec} makes a private clone;
+ * further changes to the given config will have no effect on the created codec.
+ * 
+ * <p>
+ * All setter methods return {@link CodecConfig} to allow chaining settings
+ * conveniently, for example:
+ * 
+ * <pre>
+ * CodecConfig conf = new CodecConfig();
+ * conf.setter1().setter2();
+ * </pre>
+ * 
+ * @since 4.0
+ */
+public class CodecConfig implements Cloneable {
+
+  /** Default is 1024. Change using {@link #setTermCacheSize(int)} */
+  public static final int DEFAULT_TERM_CACHE_SIZE = 1024;
+
+  /** Default is 1. Change using {@link #setPulsingFreqCutoff(int)} */
+  public static final int DEFAULT_PULSING_FREQ_CUTOFF = 1;
+
+  private int pulsingFreqCutoff = DEFAULT_PULSING_FREQ_CUTOFF;
+  private boolean docValuesUseCFS = true;
+  private Comparator<BytesRef> docValuesByteComparator = BytesRef
+      .getUTF8SortedAsUnicodeComparator();
+  private int termCacheSize = DEFAULT_TERM_CACHE_SIZE;
+
+  /**
+   * If set to <code>true</code> the codec will use a compound file for
+   * IndexDocValues, otherwise each IndexDocValues field will create up to 2
+   * files per segment.
+   */
+  public CodecConfig setDocValuesUseCFS(boolean docValuesUseCFS) {
+    this.docValuesUseCFS = docValuesUseCFS;
+    return this;
+  }
+
+  /**
+   * Returns <code>true</code> iff compound file should be used for
+   * IndexDocValues, otherwise <code>false</code>.
+   * 
+   * @see #setDocValuesUseCFS(boolean)
+   * @return <code>true</code> iff compound file should be used for
+   *         IndexDocValues, otherwise <code>false</code>.
+   */
+  public boolean getDocValuesUseCFS() {
+    return docValuesUseCFS;
+  }
+
+  /**
+   * Sets the {@link BytesRef} comparator for sorted IndexDocValue variants. The
+   * default is {@link BytesRef#getUTF8SortedAsUnicodeComparator()}.
+   */
+  public CodecConfig setDocValuesByteComparator(
+      Comparator<BytesRef> docValuesByteComparator) {
+    this.docValuesByteComparator = docValuesByteComparator;
+    return this;
+  }
+
+  /**
+   * Returns the {@link BytesRef} comparator for sorted IndexDocValue variants.
+   * The default is {@link BytesRef#getUTF8SortedAsUnicodeComparator()}.
+   */
+  public Comparator<BytesRef> getDocValuesByteComparator() {
+    return docValuesByteComparator;
+  }
+
+  /**
+   * Sets the size of the term dictionaries term cache for term lookups
+   * <p>
+   * NOTE: the size must be &gt;= 1 otherwise an {@link IllegalArgumentException} is
+   * thrown
+   * 
+   * @throws IllegalArgumentException
+   *           if the given size is &lt; 1
+   * @see #DEFAULT_TERM_CACHE_SIZE
+   */
+  public CodecConfig setTermCacheSize(int termCacheSize) {
+    if (termCacheSize < 1) {
+      throw new IllegalArgumentException("termCacheSize must be >= 1");
+    }
+    this.termCacheSize = termCacheSize;
+    return this;
+  }
+
+  /**
+   * Returns the size of the term dictionaries term cache for term lookups
+   * 
+   * @see #DEFAULT_TERM_CACHE_SIZE
+   */
+  public int getTermCacheSize() {
+    return termCacheSize;
+  }
+
+  /**
+   * Sets the cutoff term frequency where terms with term frequency &lt;=
+   * freqCutoff are inlined into terms dictionary.
+   * <p>
+   * NOTE: this option is only used by {@link PulsingCodec}
+   */
+  public CodecConfig setPulsingFreqCutoff(int pulsingFreqCutoff) {
+    this.pulsingFreqCutoff = pulsingFreqCutoff;
+    return this;
+  }
+
+  /**
+   * Returns the cutoff term frequency where terms with term frequency &lt;=
+   * freqCutoff are inlined into terms dictionary.
+   * <p>
+   * NOTE: this option is only used by {@link PulsingCodec}
+   */
+  public int getPulsingFreqCutoff() {
+    return pulsingFreqCutoff;
+  }
+
+  @Override
+  public Object clone() {
+    try {
+      return super.clone();
+    } catch (CloneNotSupportedException e) {
+      // should not happen
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("docValuesUseCFS=").append(docValuesUseCFS).append("\n");
+    sb.append("pulsingFreqCutoff=").append(pulsingFreqCutoff).append("\n");
+    sb.append("termCacheSize=").append(termCacheSize).append("\n");
+    sb.append("docValuesByteComparator=")
+        .append(
+            docValuesByteComparator == null ? "null" : docValuesByteComparator
+                .getClass().getName()).append("\n");
+    return sb.toString();
+  }
+
+}
Index: lucene/src/java/org/apache/lucene/index/codecs/CoreCodecProvider.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/CoreCodecProvider.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/CoreCodecProvider.java	(working copy)
@@ -44,7 +44,7 @@
   public CoreCodecProvider() {
     register(new StandardCodec());
     register(new PreFlexCodec());
-    register(new PulsingCodec(1));
+    register(new PulsingCodec());
     register(new SimpleTextCodec());
     register(new MemoryCodec());
   }
Index: lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java	(working copy)
@@ -37,73 +37,91 @@
   private final Directory directory;
   private final AtomicLong bytesUsed;
   private final Comparator<BytesRef> comparator;
-
-  public DefaultDocValuesConsumer(PerDocWriteState state, Comparator<BytesRef> comparator) {
+  private boolean useCompoundFile;
+  public DefaultDocValuesConsumer(PerDocWriteState state, Comparator<BytesRef> comparator, boolean useCompoundFile) throws IOException {
     this.segmentName = state.segmentName;
     this.codecId = state.codecId;
     this.bytesUsed = state.bytesUsed;
-    this.directory = state.directory;
+    //TODO maybe we should enable a global CFS that all codecs can pull on demand to further reduce the number of files?
+    this.directory = useCompoundFile ? state.directory.createCompoundOutput(IndexFileNames.segmentFileName(segmentName, state.codecId, IndexFileNames.COMPOUND_FILE_EXTENSION)) : state.directory;
     this.comparator = comparator;
+    this.useCompoundFile = useCompoundFile;
   }
-  
+
   public void close() throws IOException {
+    if (useCompoundFile) {
+      this.directory.close();
+    }
   }
 
   @Override
   public DocValuesConsumer addValuesField(FieldInfo field) throws IOException {
     return Writer.create(field.getDocValues(),
         docValuesId(segmentName, codecId, field.number),
-        // TODO can we have a compound file per segment and codec for
-        // docvalues?
         directory, comparator, bytesUsed);
   }
   
   @SuppressWarnings("fallthrough")
   public static void files(Directory dir, SegmentInfo segmentInfo, int codecId,
-      Set<String> files) throws IOException {
+      Set<String> files, boolean useCompoundFile) throws IOException {
     FieldInfos fieldInfos = segmentInfo.getFieldInfos();
     for (FieldInfo fieldInfo : fieldInfos) {
       if (fieldInfo.getCodecId() == codecId && fieldInfo.hasDocValues()) {
         String filename = docValuesId(segmentInfo.name, codecId,
             fieldInfo.number);
-        switch (fieldInfo.getDocValues()) {
-        case BYTES_FIXED_DEREF:
-        case BYTES_VAR_DEREF:
-        case BYTES_VAR_SORTED:
-        case BYTES_FIXED_SORTED:
-        case BYTES_VAR_STRAIGHT:
-          files.add(IndexFileNames.segmentFileName(filename, "",
-              Writer.INDEX_EXTENSION));
-          assert dir.fileExists(IndexFileNames.segmentFileName(filename, "",
-              Writer.INDEX_EXTENSION));
-          // until here all types use an index
-        case BYTES_FIXED_STRAIGHT:
-        case FLOAT_32:
-        case FLOAT_64:
-        case VAR_INTS:
-        case FIXED_INTS_16:
-        case FIXED_INTS_32:
-        case FIXED_INTS_64:
-        case FIXED_INTS_8:
-          files.add(IndexFileNames.segmentFileName(filename, "",
-              Writer.DATA_EXTENSION));
-          assert dir.fileExists(IndexFileNames.segmentFileName(filename, "",
-              Writer.DATA_EXTENSION));
-          break;
-      
-        default:
-          assert false;
+        if (useCompoundFile) {
+          files.add(IndexFileNames.segmentFileName(segmentInfo.name, codecId, IndexFileNames.COMPOUND_FILE_EXTENSION));
+          files.add(IndexFileNames.segmentFileName(segmentInfo.name, codecId, IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
+          assert dir.fileExists(IndexFileNames.segmentFileName(segmentInfo.name, codecId, IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION)); 
+          assert dir.fileExists(IndexFileNames.segmentFileName(segmentInfo.name, codecId, IndexFileNames.COMPOUND_FILE_EXTENSION)); 
+          return;
+        } else {
+          switch (fieldInfo.getDocValues()) {
+          case BYTES_FIXED_DEREF:
+          case BYTES_VAR_DEREF:
+          case BYTES_VAR_SORTED:
+          case BYTES_FIXED_SORTED:
+          case BYTES_VAR_STRAIGHT:
+            files.add(IndexFileNames.segmentFileName(filename, "",
+                Writer.INDEX_EXTENSION));
+            assert dir.fileExists(IndexFileNames.segmentFileName(filename, "",
+                Writer.INDEX_EXTENSION));
+            // until here all types use an index
+          case BYTES_FIXED_STRAIGHT:
+          case FLOAT_32:
+          case FLOAT_64:
+          case VAR_INTS:
+          case FIXED_INTS_16:
+          case FIXED_INTS_32:
+          case FIXED_INTS_64:
+          case FIXED_INTS_8:
+            files.add(IndexFileNames.segmentFileName(filename, "",
+                Writer.DATA_EXTENSION));
+            assert dir.fileExists(IndexFileNames.segmentFileName(filename, "",
+                Writer.DATA_EXTENSION));
+            break;
+        
+          default:
+            assert false;
+          }
         }
       }
     }
   }
   
+
   static String docValuesId(String segmentsName, int codecID, int fieldId) {
     return segmentsName + "_" + codecID + "-" + fieldId;
   }
+  
+  public static void getDocValuesExtensions(Set<String> extensions, boolean useCompoundFile) {
+    if (useCompoundFile) {
+      extensions.add(IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION);
+      extensions.add(IndexFileNames.COMPOUND_FILE_EXTENSION);
+    } else {
+      extensions.add(Writer.DATA_EXTENSION);
+      extensions.add(Writer.INDEX_EXTENSION);
+    }
+  }
 
-  public static void getDocValuesExtensions(Set<String> extensions) {
-    extensions.add(Writer.DATA_EXTENSION);
-    extensions.add(Writer.INDEX_EXTENSION);
-  }
 }
Index: lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java	(working copy)
@@ -16,12 +16,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+import java.io.Closeable;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.TreeMap;
 
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.index.values.Bytes;
 import org.apache.lucene.index.values.IndexDocValues;
@@ -29,6 +32,7 @@
 import org.apache.lucene.index.values.Ints;
 import org.apache.lucene.index.values.ValueType;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * Abstract base class for FieldsProducer implementations supporting
@@ -39,6 +43,8 @@
 public class DefaultDocValuesProducer extends PerDocValues {
 
   protected final TreeMap<String, IndexDocValues> docValues;
+  private final boolean useCompoundFile;
+  private final Closeable cfs;
 
   /**
    * Creates a new {@link DefaultDocValuesProducer} instance and loads all
@@ -55,9 +61,22 @@
    * @throws IOException
    *           if an {@link IOException} occurs
    */
+//  public DefaultDocValuesProducer(SegmentInfo si, Directory dir,
+//      FieldInfos fieldInfo, int codecId) throws IOException {
+//    this(si, dir, fieldInfo, codecId, true);
+//  }
+  
   public DefaultDocValuesProducer(SegmentInfo si, Directory dir,
-      FieldInfos fieldInfo, int codecId) throws IOException {
-    docValues = load(fieldInfo, si.name, si.docCount, dir, codecId);
+      FieldInfos fieldInfo, int codecId, boolean useCompoundFile) throws IOException {
+    this.useCompoundFile = useCompoundFile;
+    final Directory directory;
+    if (useCompoundFile) {
+      cfs = directory = dir.openCompoundInput(IndexFileNames.segmentFileName(si.name, codecId, IndexFileNames.COMPOUND_FILE_EXTENSION), 1024);
+    } else {
+      cfs = null;
+      directory = dir;
+    }
+    docValues = load(fieldInfo, si.name, si.docCount, directory, codecId);
   }
 
   /**
@@ -92,7 +111,7 @@
     } finally {
       if (!success) {
         // if we fail we must close all opened resources if there are any
-        closeDocValues(values.values());
+        closeInternal(values.values());
       }
     }
     return values;
@@ -149,22 +168,20 @@
   }
 
   public void close() throws IOException {
-    closeDocValues(docValues.values());
+    closeInternal(docValues.values());
   }
 
-  private void closeDocValues(final Collection<IndexDocValues> values)
-      throws IOException {
-    IOException ex = null;
-    for (IndexDocValues docValues : values) {
-      try {
-        docValues.close();
-      } catch (IOException e) {
-        ex = e;
-      }
-    }
-    if (ex != null) {
-      throw ex;
-    }
+  private void closeInternal(Collection<? extends Closeable> closeables) throws IOException {
+    final Collection<? extends Closeable> toClose;
+    if (useCompoundFile) {
+      final ArrayList<Closeable> list = new ArrayList<Closeable>(closeables);
+      list.add(cfs);
+      toClose = list; 
+    } else {
+      toClose = docValues.values();
+    
+    } 
+    IOUtils.closeSafely(false, toClose);
   }
 
   @Override
Index: lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java	(working copy)
@@ -37,6 +37,7 @@
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesConsumer;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
@@ -76,10 +77,14 @@
  * @lucene.experimental */
 
 public class MemoryCodec extends Codec {
-
+  
   public MemoryCodec() {
-    name = "Memory";
+    this(new CodecConfig());
   }
+  
+  public MemoryCodec(CodecConfig config) {
+    super("Memory", config);
+  }
 
   private static final boolean VERBOSE = false;
 
@@ -778,22 +783,22 @@
   @Override
   public void files(Directory dir, SegmentInfo segmentInfo, int id, Set<String> files) throws IOException {
     files.add(IndexFileNames.segmentFileName(segmentInfo.name, id, EXTENSION));
-    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files, config.getDocValuesUseCFS());
   }
 
   @Override
   public void getExtensions(Set<String> extensions) {
     extensions.add(EXTENSION);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java	(working copy)
@@ -26,6 +26,7 @@
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.SegmentReadState;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
 import org.apache.lucene.index.codecs.PerDocConsumer;
@@ -55,7 +56,7 @@
   public static final String PROX_EXTENSION = "prx";
 
   public PreFlexCodec() {
-    name = "PreFlex";
+    super("PreFlex", new CodecConfig());
   }
   
   @Override
Index: lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java	(working copy)
@@ -29,6 +29,7 @@
 import org.apache.lucene.index.codecs.standard.StandardPostingsWriter;
 import org.apache.lucene.index.codecs.PostingsReaderBase;
 import org.apache.lucene.index.codecs.standard.StandardPostingsReader;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
@@ -58,12 +59,14 @@
 
   private final int freqCutoff;
 
-  /** Terms with freq <= freqCutoff are inlined into terms
-   *  dict. */
-  public PulsingCodec(int freqCutoff) {
-    name = "Pulsing";
-    this.freqCutoff = freqCutoff;
+  public PulsingCodec() {
+    this(new CodecConfig());
   }
+  
+  public PulsingCodec(CodecConfig config) {
+    super("Pulsing", config);
+    this.freqCutoff = config.getPulsingFreqCutoff();
+  }
 
   @Override
   public String toString() {
@@ -137,7 +140,7 @@
                                                 state.dir, state.fieldInfos, state.segmentInfo.name,
                                                 pulsingReader,
                                                 state.readBufferSize,
-                                                StandardCodec.TERMS_CACHE_SIZE,
+                                                config.getTermCacheSize(),
                                                 state.codecId);
       success = true;
       return ret;
@@ -157,22 +160,22 @@
     StandardPostingsReader.files(dir, segmentInfo, id, files);
     BlockTermsReader.files(dir, segmentInfo, id, files);
     VariableGapTermsIndexReader.files(dir, segmentInfo, id, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files, config.getDocValuesUseCFS());
   }
 
   @Override
   public void getExtensions(Set<String> extensions) {
     StandardCodec.getStandardExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
   
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java	(working copy)
@@ -26,6 +26,7 @@
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
@@ -33,7 +34,6 @@
 import org.apache.lucene.index.codecs.DefaultDocValuesConsumer;
 import org.apache.lucene.index.codecs.PerDocValues;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
 
 /** For debugging, curiosity, transparency only!!  Do not
  *  use this codec in production.
@@ -44,11 +44,15 @@
  *
  *  @lucene.experimental */
 public class SimpleTextCodec extends Codec {
-
+  
   public SimpleTextCodec() {
-    name = "SimpleText";
+    this(new CodecConfig());
   }
 
+  public SimpleTextCodec(CodecConfig config) {
+    super("SimpleText", config);
+  }
+
   @Override
   public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
     return new SimpleTextFieldsWriter(state);
@@ -69,23 +73,23 @@
   @Override
   public void files(Directory dir, SegmentInfo segmentInfo, int id, Set<String> files) throws IOException {
     files.add(getPostingsFileName(segmentInfo.name, id));
-    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files, config.getDocValuesUseCFS());
   }
 
   @Override
   public void getExtensions(Set<String> extensions) {
     extensions.add(POSTINGS_EXTENSION);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
   
   // TODO: would be great if these used a plain text impl
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java	(working copy)
@@ -25,6 +25,7 @@
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
 import org.apache.lucene.index.codecs.PerDocConsumer;
@@ -40,16 +41,19 @@
 import org.apache.lucene.index.codecs.BlockTermsReader;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
 
 /** Default codec. 
  *  @lucene.experimental */
 public class StandardCodec extends Codec {
 
   public StandardCodec() {
-    name = "Standard";
+    this(new CodecConfig());
   }
 
+  public StandardCodec(CodecConfig config) {
+    super("Standard", config);
+  }
+
   @Override
   public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
     PostingsWriterBase docs = new StandardPostingsWriter(state);
@@ -85,8 +89,6 @@
     }
   }
 
-  public final static int TERMS_CACHE_SIZE = 1024;
-
   @Override
   public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
     PostingsReaderBase postings = new StandardPostingsReader(state.dir, state.segmentInfo, state.readBufferSize, state.codecId);
@@ -114,7 +116,7 @@
                                                 state.segmentInfo.name,
                                                 postings,
                                                 state.readBufferSize,
-                                                TERMS_CACHE_SIZE,
+                                                config.getTermCacheSize(),
                                                 state.codecId);
       success = true;
       return ret;
@@ -140,13 +142,13 @@
     StandardPostingsReader.files(dir, segmentInfo, id, files);
     BlockTermsReader.files(dir, segmentInfo, id, files);
     VariableGapTermsIndexReader.files(dir, segmentInfo, id, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, id, files, config.getDocValuesUseCFS());
   }
 
   @Override
   public void getExtensions(Set<String> extensions) {
     getStandardExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
 
   public static void getStandardExtensions(Set<String> extensions) {
@@ -158,11 +160,11 @@
 
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java	(working copy)
@@ -60,7 +60,7 @@
    * NOTE: subclasses must call {@link #initForRead(Map)} before the directory can be used.
    */
   public CompoundFileDirectory(Directory directory, String fileName, int readBufferSize) throws IOException {
-    assert !(directory instanceof CompoundFileDirectory) : "compound file inside of compound file: " + fileName;
+
     this.directory = directory;
     this.fileName = fileName;
     this.readBufferSize = readBufferSize;
@@ -75,6 +75,7 @@
   }
   
   protected final void initForWrite() {
+    assert !(directory instanceof CompoundFileDirectory) : "compound file inside of compound file: " + fileName;
     this.entries = SENTINEL;
     this.openForWrite = true;
     this.isOpen = true;
@@ -173,7 +174,11 @@
   
   @Override
   public synchronized void close() throws IOException {
-    ensureOpen();
+    if (!isOpen) {
+      // allow double close - to be consistent with other Closeables
+      assert entries == null;
+      return; // already closed
+    }
     entries = null;
     isOpen = false;
     if (writer != null) {
@@ -285,12 +290,13 @@
     throw new UnsupportedOperationException();
   }
   
-  /** Not implemented
-   * @throws UnsupportedOperationException */
   @Override
-  public final CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
-    // NOTE: final to make nested compounding impossible.
-    throw new UnsupportedOperationException();
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
+    FileEntry fileEntry = this.entries.get(IndexFileNames.stripSegmentName(name));
+    if (fileEntry == null) {
+      throw new FileNotFoundException("file " + name + " does not exist in this CFS");
+    }
+    return new NestedCompoundFileDirectory(name, bufferSize, fileEntry.offset, fileEntry.length);
   }
   
   /** Not implemented
@@ -298,8 +304,7 @@
   @Override
   public CompoundFileDirectory createCompoundOutput(String name)
       throws IOException {
-    // NOTE: final to make nested compounding impossible.
-    throw new UnsupportedOperationException();
+    throw new UnsupportedOperationException("cannot create nested CFS, create separately and use Directory.copy instead");
   }
   
   private final void initWriter() {
@@ -309,5 +314,34 @@
       writer = new CompoundFileWriter(directory, fileName);
     }
   }
- 
+  
+  private class NestedCompoundFileDirectory extends CompoundFileDirectory {
+
+    private final long cfsOffset;
+    private final long cfsLength;
+
+    public NestedCompoundFileDirectory(String fileName, int readBufferSize, long offset, long length)
+        throws IOException {
+      super(directory, fileName, readBufferSize);
+      this.cfsOffset = offset;
+      this.cfsLength = length;
+      IndexInput input = null;
+      try {
+        input = CompoundFileDirectory.this.openInput(fileName, 128);
+        initForRead(CompoundFileDirectory.readEntries(input,
+            CompoundFileDirectory.this, fileName));
+      } finally {
+        IOUtils.closeSafely(false, input);
+      }
+    }
+
+    @Override
+    public IndexInput openInputSlice(String id, long offset, long length,
+        int readBufferSize) throws IOException {
+      assert offset + length <= cfsLength; 
+      return CompoundFileDirectory.this.openInputSlice(id, cfsOffset + offset, length, readBufferSize);
+    }
+    
+  }
+  
 }
Index: lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java
===================================================================
--- lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java	(revision 1141573)
+++ lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java	(working copy)
@@ -17,6 +17,7 @@
  * limitations under the License.
  */
 
+import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Collection;
@@ -55,7 +56,7 @@
  * 
  * @lucene.internal
  */
-final class CompoundFileWriter {
+final class CompoundFileWriter implements Closeable {
 
   private static final class FileEntry {
     /** source file */
@@ -89,8 +90,8 @@
   private boolean closed = false;
   private volatile IndexOutput dataOut;
   private final AtomicBoolean outputTaken = new AtomicBoolean(false);
-  private final String entryTableName;
-  private final String dataFileName;
+  final String entryTableName;
+  final String dataFileName;
 
   /**
    * Create the compound stream in the specified file. The file name is the
@@ -128,7 +129,7 @@
    *           if close() had been called before or if no file has been added to
    *           this object
    */
-  void close() throws IOException {
+  public void close() throws IOException {
     if (closed) {
       throw new IllegalStateException("already closed");
     }
@@ -147,12 +148,18 @@
       assert dataOut != null;
       long finalLength = dataOut.getFilePointer();
       assert assertFileLength(finalLength, dataOut);
+    } catch (IOException e) {
+      priorException = e;
+    } finally {
+      IOUtils.closeSafely(priorException, dataOut);
+    }
+    try {
       entryTableOut = directory.createOutput(entryTableName);
       writeEntryTable(entries.values(), entryTableOut);
     } catch (IOException e) {
       priorException = e;
     } finally {
-      IOUtils.closeSafely(priorException, dataOut, entryTableOut);
+      IOUtils.closeSafely(priorException, entryTableOut);
     }
   }
 
@@ -321,6 +328,7 @@
         closed = true;
         entry.length = writtenBytes;
         if (isSeparate) {
+          delegate.close();
           // we are a separate file - push into the pending entries
           pendingEntries.add(entry);
         } else {
Index: lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java	(working copy)
@@ -34,6 +34,7 @@
 import org.apache.lucene.index.codecs.sep.SepPostingsWriterImpl;
 import org.apache.lucene.index.codecs.intblock.FixedIntBlockIndexInput;
 import org.apache.lucene.index.codecs.intblock.FixedIntBlockIndexOutput;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
 import org.apache.lucene.index.codecs.FixedGapTermsIndexWriter;
@@ -46,7 +47,6 @@
 import org.apache.lucene.index.codecs.BlockTermsWriter;
 import org.apache.lucene.index.codecs.TermsIndexReaderBase;
 import org.apache.lucene.index.codecs.TermsIndexWriterBase;
-import org.apache.lucene.index.codecs.standard.StandardCodec;
 import org.apache.lucene.store.*;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
@@ -62,8 +62,12 @@
   private final int blockSize;
 
   public MockFixedIntBlockCodec(int blockSize) {
+    this(blockSize, new CodecConfig());
+  }
+  
+  public MockFixedIntBlockCodec(int blockSize, CodecConfig config) {
+    super("MockFixedIntBlock", config);
     this.blockSize = blockSize;
-    name = "MockFixedIntBlock";
   }
 
   @Override
@@ -186,7 +190,7 @@
                                                 state.segmentInfo.name,
                                                 postingsReader,
                                                 state.readBufferSize,
-                                                StandardCodec.TERMS_CACHE_SIZE,
+                                                config.getTermCacheSize(),
                                                 state.codecId);
       success = true;
       return ret;
@@ -206,7 +210,7 @@
     SepPostingsReaderImpl.files(segmentInfo, codecId, files);
     BlockTermsReader.files(dir, segmentInfo, codecId, files);
     FixedGapTermsIndexReader.files(dir, segmentInfo, codecId, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files, config.getDocValuesUseCFS());
   }
 
   @Override
@@ -214,16 +218,16 @@
     SepPostingsWriterImpl.getExtensions(extensions);
     BlockTermsReader.getExtensions(extensions);
     FixedGapTermsIndexReader.getIndexExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
   
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java	(working copy)
@@ -34,6 +34,7 @@
 import org.apache.lucene.index.codecs.sep.SepPostingsWriterImpl;
 import org.apache.lucene.index.codecs.intblock.VariableIntBlockIndexInput;
 import org.apache.lucene.index.codecs.intblock.VariableIntBlockIndexOutput;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
 import org.apache.lucene.index.codecs.FixedGapTermsIndexWriter;
@@ -46,7 +47,6 @@
 import org.apache.lucene.index.codecs.BlockTermsWriter;
 import org.apache.lucene.index.codecs.TermsIndexReaderBase;
 import org.apache.lucene.index.codecs.TermsIndexWriterBase;
-import org.apache.lucene.index.codecs.standard.StandardCodec;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
@@ -62,9 +62,13 @@
 
 public class MockVariableIntBlockCodec extends Codec {
   private final int baseBlockSize;
+  
+  public MockVariableIntBlockCodec(int baseBlockSize) {
+    this(baseBlockSize, new CodecConfig());
+  }
 
-  public MockVariableIntBlockCodec(int baseBlockSize) {
-    name = "MockVariableIntBlock";
+  public MockVariableIntBlockCodec(int baseBlockSize, CodecConfig config) {
+    super("MockVariableIntBlock", config);
     this.baseBlockSize = baseBlockSize;
   }
 
@@ -209,7 +213,7 @@
                                                 state.segmentInfo.name,
                                                 postingsReader,
                                                 state.readBufferSize,
-                                                StandardCodec.TERMS_CACHE_SIZE,
+                                                config.getTermCacheSize(),
                                                 state.codecId);
       success = true;
       return ret;
@@ -229,7 +233,7 @@
     SepPostingsReaderImpl.files(segmentInfo, codecId, files);
     BlockTermsReader.files(dir, segmentInfo, codecId, files);
     FixedGapTermsIndexReader.files(dir, segmentInfo, codecId, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files, config.getDocValuesUseCFS());
   }
 
   @Override
@@ -237,16 +241,16 @@
     SepPostingsWriterImpl.getExtensions(extensions);
     BlockTermsReader.getExtensions(extensions);
     FixedGapTermsIndexReader.getIndexExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
   
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java	(working copy)
@@ -33,6 +33,7 @@
 import org.apache.lucene.index.codecs.BlockTermsReader;
 import org.apache.lucene.index.codecs.BlockTermsWriter;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
@@ -75,9 +76,13 @@
 
   private final Random seedRandom;
   private final String SEED_EXT = "sd";
+  
+  public MockRandomCodec(Random random) {
+    this(random, new CodecConfig());
+  }
 
-  public MockRandomCodec(Random random) {
-    name = "MockRandom";
+  public MockRandomCodec(Random random, CodecConfig config) {
+    super("MockRandom", config);
     this.seedRandom = new Random(random.nextLong());
   }
 
@@ -354,7 +359,7 @@
     BlockTermsReader.files(dir, segmentInfo, codecId, files);
     FixedGapTermsIndexReader.files(dir, segmentInfo, codecId, files);
     VariableGapTermsIndexReader.files(dir, segmentInfo, codecId, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files, config.getDocValuesUseCFS());
     // hackish!
     Iterator<String> it = files.iterator();
     while(it.hasNext()) {
@@ -372,7 +377,7 @@
     BlockTermsReader.getExtensions(extensions);
     FixedGapTermsIndexReader.getIndexExtensions(extensions);
     VariableGapTermsIndexReader.getIndexExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
     extensions.add(SEED_EXT);
     //System.out.println("MockRandom.getExtensions return " + extensions);
   }
@@ -380,11 +385,11 @@
   // can we make this more evil?
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java	(working copy)
@@ -25,6 +25,7 @@
 import org.apache.lucene.index.SegmentWriteState;
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
@@ -54,8 +55,12 @@
 public class MockSepCodec extends Codec {
 
   public MockSepCodec() {
-    name = "MockSep";
+    this(new CodecConfig());
   }
+  
+  public MockSepCodec(CodecConfig config) {
+    super("MockSep", config);
+  }
 
   @Override
   public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
@@ -119,7 +124,7 @@
                                                 state.segmentInfo.name,
                                                 postingsReader,
                                                 state.readBufferSize,
-                                                StandardCodec.TERMS_CACHE_SIZE,
+                                                config.getTermCacheSize(),
                                                 state.codecId);
       success = true;
       return ret;
@@ -139,13 +144,13 @@
     SepPostingsReaderImpl.files(segmentInfo, codecId, files);
     BlockTermsReader.files(dir, segmentInfo, codecId, files);
     FixedGapTermsIndexReader.files(dir, segmentInfo, codecId, files);
-    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files);
+    DefaultDocValuesConsumer.files(dir, segmentInfo, codecId, files, config.getDocValuesUseCFS());
   }
 
   @Override
   public void getExtensions(Set<String> extensions) {
     getSepExtensions(extensions);
-    DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+    DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
   }
 
   public static void getSepExtensions(Set<String> extensions) {
@@ -156,11 +161,11 @@
   
   @Override
   public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-    return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+    return new DefaultDocValuesConsumer(state, config.getDocValuesByteComparator(), config.getDocValuesUseCFS());
   }
 
   @Override
   public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+    return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
   }
 }
Index: lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java	(working copy)
@@ -23,6 +23,7 @@
 import org.apache.lucene.index.SegmentReadState;
 import org.apache.lucene.index.codecs.preflex.PreFlexCodec;
 import org.apache.lucene.index.codecs.preflex.PreFlexFields;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.FieldsConsumer;
 import org.apache.lucene.index.codecs.FieldsProducer;
 import org.apache.lucene.util.LuceneTestCase;
@@ -37,7 +38,6 @@
   public PreFlexRWCodec() {
     // NOTE: we impersonate the PreFlex codec so that it can
     // read the segments we write!
-    super();
   }
   
   @Override
Index: lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java	(working copy)
@@ -148,4 +148,9 @@
   public CompoundFileDirectory createCompoundOutput(String name) throws IOException {
     return delegate.createCompoundOutput(name);
   }
+  
+  public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
+    return delegate.openCompoundInput(name, bufferSize);
+  }
+
 }
Index: lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java	(revision 1141573)
+++ lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java	(working copy)
@@ -42,6 +42,7 @@
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.codecs.mockintblock.MockFixedIntBlockCodec;
 import org.apache.lucene.index.codecs.mockintblock.MockVariableIntBlockCodec;
@@ -276,15 +277,23 @@
       swapCodec(new PreFlexRWCodec(), cp);
     }
 
-    swapCodec(new MockSepCodec(), cp);
-    swapCodec(new PulsingCodec(codecHasParam && "Pulsing".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 20)), cp);
-    swapCodec(new MockFixedIntBlockCodec(codecHasParam && "MockFixedIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 2000)), cp);
+    swapCodec(new MockSepCodec(newCodecConfig()), cp);
+    swapCodec(new PulsingCodec(codecHasParam && "Pulsing".equals(codec) ? newCodecConfig().setPulsingFreqCutoff(codecParam) : newCodecConfig()), cp);
+    swapCodec(new MockFixedIntBlockCodec(codecHasParam && "MockFixedIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 2000), newCodecConfig()), cp);
     // baseBlockSize cannot be over 127:
-    swapCodec(new MockVariableIntBlockCodec(codecHasParam && "MockVariableIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 127)), cp);
+    swapCodec(new MockVariableIntBlockCodec(codecHasParam && "MockVariableIntBlock".equals(codec) ? codecParam : _TestUtil.nextInt(random, 1, 127), newCodecConfig()), cp);
     swapCodec(new MockRandomCodec(random), cp);
 
     return cp.lookup(codec);
   }
+  
+  public static CodecConfig newCodecConfig() {
+    CodecConfig config = new CodecConfig();
+    config.setTermCacheSize(100 + random.nextInt(3000));
+    config.setPulsingFreqCutoff(1 + random.nextInt(20));
+    config.setDocValuesUseCFS(random.nextBoolean());
+    return config;
+  }
 
   // returns current PreFlex codec
   static void removeTestCodecs(Codec codec, CodecProvider cp) {
@@ -299,7 +308,7 @@
     cp.unregister(cp.lookup("MockFixedIntBlock"));
     cp.unregister(cp.lookup("MockVariableIntBlock"));
     cp.unregister(cp.lookup("MockRandom"));
-    swapCodec(new PulsingCodec(1), cp);
+    swapCodec(new PulsingCodec(newCodecConfig()), cp);
     cp.setDefaultFieldCodec(savedDefaultCodec);
   }
 
@@ -1464,11 +1473,11 @@
 
     RandomCodecProvider(Random random) {
       this.perFieldSeed = random.nextInt();
-      register(new StandardCodec());
+      register(new StandardCodec(newCodecConfig()));
       register(new PreFlexCodec());
-      register(new PulsingCodec(1));
-      register(new SimpleTextCodec());
-      register(new MemoryCodec());
+      register(new PulsingCodec(newCodecConfig()));
+      register(new SimpleTextCodec(newCodecConfig()));
+      register(new MemoryCodec(newCodecConfig()));
       Collections.shuffle(knownCodecs, random);
     }
 
Index: lucene/src/test/org/apache/lucene/TestExternalCodecs.java
===================================================================
--- lucene/src/test/org/apache/lucene/TestExternalCodecs.java	(revision 1141573)
+++ lucene/src/test/org/apache/lucene/TestExternalCodecs.java	(working copy)
@@ -24,8 +24,6 @@
 import org.apache.lucene.search.*;
 import org.apache.lucene.analysis.*;
 import org.apache.lucene.index.codecs.*;
-import org.apache.lucene.index.codecs.standard.*;
-import org.apache.lucene.index.codecs.pulsing.*;
 import org.apache.lucene.store.*;
 import java.util.*;
 import java.io.*;
@@ -75,7 +73,7 @@
   public static class RAMOnlyCodec extends Codec {
     
     public RAMOnlyCodec() {
-      name = "RamOnly";
+      super("RamOnly", new CodecConfig());
     }
     // Postings state:
     static class RAMPostings extends FieldsProducer {
Index: lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestAddIndexes.java	(revision 1141573)
+++ lucene/src/test/org/apache/lucene/index/TestAddIndexes.java	(working copy)
@@ -1147,7 +1147,7 @@
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer(random));
       CodecProvider provider = new CodecProvider();
-      provider.register(new StandardCodec());
+      provider.register(new StandardCodec(newCodecConfig()));
       conf.setCodecProvider(provider);
       IndexWriter w = new IndexWriter(toAdd, conf);
       Document doc = new Document();
@@ -1160,7 +1160,7 @@
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer(random));
       CodecProvider provider = new CodecProvider();
-      provider.register(new PulsingCodec(1 + random.nextInt(10)));
+      provider.register(new PulsingCodec(newCodecConfig()));
       conf.setCodecProvider(provider);
       IndexWriter w = new IndexWriter(dir, conf);
       try {
@@ -1181,7 +1181,7 @@
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer(random));
       CodecProvider provider = new CodecProvider();
-      provider.register(new PulsingCodec(1 + random.nextInt(10)));
+      provider.register(new PulsingCodec(newCodecConfig()));
       conf.setCodecProvider(provider);
       IndexWriter w = new IndexWriter(dir, conf);
       IndexReader indexReader = IndexReader.open(toAdd);
Index: lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestCompoundFile.java	(revision 1141573)
+++ lucene/src/test/org/apache/lucene/index/TestCompoundFile.java	(working copy)
@@ -21,8 +21,6 @@
 import java.io.File;
 
 import org.apache.lucene.util.LuceneTestCase;
-import junit.framework.TestSuite;
-import junit.textui.TestRunner;
 
 import org.apache.lucene.store.CompoundFileDirectory;
 import org.apache.lucene.store.IndexOutput;
@@ -35,27 +33,9 @@
 
 public class TestCompoundFile extends LuceneTestCase
 {
-    /** Main for running test case by itself. */
-    public static void main(String args[]) {
-        TestRunner.run (new TestSuite(TestCompoundFile.class));
-//        TestRunner.run (new TestCompoundFile("testSingleFile"));
-//        TestRunner.run (new TestCompoundFile("testTwoFiles"));
-//        TestRunner.run (new TestCompoundFile("testRandomFiles"));
-//        TestRunner.run (new TestCompoundFile("testClonedStreamsClosing"));
-//        TestRunner.run (new TestCompoundFile("testReadAfterClose"));
-//        TestRunner.run (new TestCompoundFile("testRandomAccess"));
-//        TestRunner.run (new TestCompoundFile("testRandomAccessClones"));
-//        TestRunner.run (new TestCompoundFile("testFileNotFound"));
-//        TestRunner.run (new TestCompoundFile("testReadPastEOF"));
 
-//        TestRunner.run (new TestCompoundFile("testIWCreate"));
-
-    }
-
-
     private Directory dir;
 
-
     @Override
     public void setUp() throws Exception {
        super.setUp();
@@ -717,5 +697,63 @@
     cfr.close();
     newDir.close();
   }
+  
+  public void testReadNestedCFP() throws IOException {
+    Directory newDir = newDirectory();
+    CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
+    CompoundFileDirectory nested = newDir.createCompoundOutput("b.cfs");
+    IndexOutput out = nested.createOutput("b.xyz");
+    IndexOutput out1 = nested.createOutput("b_1.xyz");
+    out.writeInt(0);
+    out1.writeInt(1);
+    out.close();
+    out1.close();
+    nested.close();
+    newDir.copy(csw, "b.cfs", "b.cfs");
+    newDir.copy(csw, "b.cfe", "b.cfe");
+    newDir.deleteFile("b.cfs");
+    newDir.deleteFile("b.cfe");
+    csw.close();
+    
+    assertEquals(2, newDir.listAll().length);
+    csw = newDir.openCompoundInput("d.cfs", 1024);
+    
+    assertEquals(2, csw.listAll().length);
+    nested = csw.openCompoundInput("b.cfs", 1024);
+    
+    assertEquals(2, nested.listAll().length);
+    IndexInput openInput = nested.openInput("b.xyz");
+    assertEquals(0, openInput.readInt());
+    openInput.close();
+    openInput = nested.openInput("b_1.xyz");
+    assertEquals(1, openInput.readInt());
+    openInput.close();
+    nested.close();
+    csw.close();
+    newDir.close();
+  }
+  
+  public void testDoubleClose() throws IOException {
+    Directory newDir = newDirectory();
+    CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
+    IndexOutput out = csw.createOutput("d.xyz");
+    out.writeInt(0);
+    out.close();
+    
+    csw.close();
+    // close a second time - must have no effect according to Closeable
+    csw.close();
+    
+    csw = newDir.openCompoundInput("d.cfs", 1024);
+    IndexInput openInput = csw.openInput("d.xyz");
+    assertEquals(0, openInput.readInt());
+    openInput.close();
+    csw.close();
+    // close a second time - must have no effect according to Closeable
+    csw.close();
+    
+    newDir.close();
+    
+  }
 
 }
Index: lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java	(revision 1141573)
+++ lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java	(working copy)
@@ -32,6 +32,7 @@
 import org.apache.lucene.index.codecs.BlockTermsReader;
 import org.apache.lucene.index.codecs.BlockTermsWriter;
 import org.apache.lucene.index.codecs.Codec;
+import org.apache.lucene.index.codecs.CodecConfig;
 import org.apache.lucene.index.codecs.CoreCodecProvider;
 import org.apache.lucene.index.codecs.DefaultDocValuesProducer;
 import org.apache.lucene.index.codecs.FieldsConsumer;
@@ -105,8 +106,9 @@
   }
 
   private static class StandardCodecWithOrds extends Codec {
-    public StandardCodecWithOrds() {
-      name = "StandardOrds";
+    
+    public StandardCodecWithOrds(CodecConfig config) {
+      super("StandardOrds", config);
     }
 
     @Override
@@ -200,13 +202,13 @@
       StandardPostingsReader.files(dir, segmentInfo, id, files);
       BlockTermsReader.files(dir, segmentInfo, id, files);
       FixedGapTermsIndexReader.files(dir, segmentInfo, id, files);
-      DefaultDocValuesConsumer.files(dir, segmentInfo, id, files);
+      DefaultDocValuesConsumer.files(dir, segmentInfo, id, files, config.getDocValuesUseCFS());
     }
 
     @Override
     public void getExtensions(Set<String> extensions) {
       getStandardExtensions(extensions);
-      DefaultDocValuesConsumer.getDocValuesExtensions(extensions);
+      DefaultDocValuesConsumer.getDocValuesExtensions(extensions, config.getDocValuesUseCFS());
     }
 
     public static void getStandardExtensions(Set<String> extensions) {
@@ -218,12 +220,12 @@
     
     @Override
     public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException {
-      return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator());
+      return new DefaultDocValuesConsumer(state, BytesRef.getUTF8SortedAsUnicodeComparator(), config.getDocValuesUseCFS());
     }
 
     @Override
     public PerDocValues docsProducer(SegmentReadState state) throws IOException {
-      return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId);
+      return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, config.getDocValuesUseCFS());
     }
   }
 
@@ -250,7 +252,7 @@
     if (random.nextInt(10) == 7) {
       // Make sure terms index has ords:
       CoreCodecProvider cp = new CoreCodecProvider();
-      cp.register(new StandardCodecWithOrds());
+      cp.register(new StandardCodecWithOrds(newCodecConfig()));
       cp.setDefaultFieldCodec("StandardOrds");
 
       // So checkIndex on close works
@@ -353,7 +355,7 @@
     if (random.nextInt(10) == 7) {
       // Make sure terms index has ords:
       CoreCodecProvider cp = new CoreCodecProvider();
-      cp.register(new StandardCodecWithOrds());
+      cp.register(new StandardCodecWithOrds(newCodecConfig()));
       cp.setDefaultFieldCodec("StandardOrds");
 
       // So checkIndex on close works
Index: lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java	(revision 1141573)
+++ lucene/src/test/org/apache/lucene/index/TestPerFieldCodecSupport.java	(working copy)
@@ -278,10 +278,10 @@
     for (int i = 0; i < numRounds; i++) {
       CodecProvider provider = new CodecProvider();
       Codec[] codecs = new Codec[] { new StandardCodec(),
-          new SimpleTextCodec(), new MockSepCodec(),
-          new PulsingCodec(1 + random.nextInt(10)),
-          new MockVariableIntBlockCodec(1 + random.nextInt(10)),
-          new MockFixedIntBlockCodec(1 + random.nextInt(10)) };
+          new SimpleTextCodec(newCodecConfig()), new MockSepCodec(newCodecConfig()),
+          new PulsingCodec(newCodecConfig()),
+          new MockVariableIntBlockCodec(1 + random.nextInt(10), newCodecConfig()),
+          new MockFixedIntBlockCodec(1 + random.nextInt(10), newCodecConfig()) };
       for (Codec codec : codecs) {
         provider.register(codec);
       }
Index: lucene/src/test/org/apache/lucene/index/codecs/TestCodecConfig.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/codecs/TestCodecConfig.java	(revision 0)
+++ lucene/src/test/org/apache/lucene/index/codecs/TestCodecConfig.java	(revision 0)
@@ -0,0 +1,119 @@
+package org.apache.lucene.index.codecs;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.junit.Test;
+
+public class TestCodecConfig extends LuceneTestCase {
+
+  @Test
+  public void testDefaults() throws Exception {
+    CodecConfig conf = new CodecConfig();
+    assertEquals(CodecConfig.DEFAULT_PULSING_FREQ_CUTOFF,
+        conf.getPulsingFreqCutoff());
+    assertEquals(CodecConfig.DEFAULT_TERM_CACHE_SIZE, conf.getTermCacheSize());
+    assertTrue(conf.getDocValuesUseCFS());
+    assertSame(BytesRef.getUTF8SortedAsUnicodeComparator(),
+        conf.getDocValuesByteComparator());
+
+    // Sanity check - validate that all getters are covered.
+    Set<String> getters = new HashSet<String>();
+    getters.add("getDocValuesByteComparator");
+    getters.add("getDocValuesUseCFS");
+    getters.add("getTermCacheSize");
+    getters.add("getPulsingFreqCutoff");
+
+    for (Method m : CodecConfig.class.getDeclaredMethods()) {
+      if (m.getDeclaringClass() == CodecConfig.class
+          && m.getName().startsWith("get")) {
+        assertTrue("method " + m.getName() + " is not tested for defaults",
+            getters.contains(m.getName()));
+      }
+    }
+  }
+
+  @Test
+  public void testSettersChaining() throws Exception {
+    // Ensures that every setter returns CodecConfig to enable easy
+    // chaining.
+    for (Method m : CodecConfig.class.getDeclaredMethods()) {
+      if (m.getDeclaringClass() == CodecConfig.class
+          && m.getName().startsWith("set")
+          && !Modifier.isStatic(m.getModifiers())) {
+        assertEquals("method " + m.getName()
+            + " does not return CodecConfig", CodecConfig.class,
+            m.getReturnType());
+      }
+    }
+  }
+
+  @Test
+  public void testConstants() throws Exception {
+    // Tests that the values of the constants do not change
+    assertEquals(1, CodecConfig.DEFAULT_PULSING_FREQ_CUTOFF);
+    assertEquals(1024, CodecConfig.DEFAULT_TERM_CACHE_SIZE);
+  }
+
+  @Test
+  public void testToString() throws Exception {
+    String str = new CodecConfig().toString();
+    for (Field f : CodecConfig.class.getDeclaredFields()) {
+      int modifiers = f.getModifiers();
+      if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
+        // Skip static final fields, they are only constants
+        continue;
+      }
+      assertTrue(f.getName() + " not found in toString",
+          str.indexOf(f.getName()) != -1);
+    }
+  }
+
+  @Test
+  public void testClone() throws Exception {
+    CodecConfig conf = new CodecConfig();
+    CodecConfig clone = (CodecConfig) conf.clone();
+
+    // Clone is shallow since not all parameters are cloneable.
+    assertTrue(conf.getDocValuesByteComparator() == clone
+        .getDocValuesByteComparator());
+
+    conf.setPulsingFreqCutoff(5);
+    assertEquals(1, clone.getPulsingFreqCutoff());
+  }
+
+  @Test
+  public void testInvalidValues() throws Exception {
+    CodecConfig conf = new CodecConfig();
+
+    try {
+      conf.setTermCacheSize(0);
+      fail("should not have succeeded to set termCacheSize to 0");
+    } catch (IllegalArgumentException e) {
+      // this is expected
+    }
+
+  }
+}
Index: solr/src/test/org/apache/solr/core/MockCodecProviderFactory.java
===================================================================
--- solr/src/test/org/apache/solr/core/MockCodecProviderFactory.java	(revision 1141573)
+++ solr/src/test/org/apache/solr/core/MockCodecProviderFactory.java	(working copy)
@@ -42,7 +42,7 @@
   public CodecProvider create() {
     CodecProvider cp = new CodecProvider();
     cp.register(new StandardCodec());
-    cp.register(new PulsingCodec(1));
+    cp.register(new PulsingCodec());
     if (codecs != null) {
       for (Object codec : codecs.getAll("name")) {
         if (!cp.isCodecRegistered((String)codec)) {
