Index: lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/BlockTermsReader.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/BlockTermsReader.java	(revision 1492635)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/blockterms/BlockTermsReader.java	(working copy)
@@ -341,7 +341,8 @@
         indexEnum = indexReader.getFieldEnum(fieldInfo);
         doOrd = indexReader.supportsOrd();
         fieldTerm.field = fieldInfo.name;
-        state = postingsReader.newTermState();
+        state = new BlockTermState();
+        state.meta = postingsReader.newMetaData(fieldInfo);
         state.totalTermFreq = -1;
         state.ord = -1;
 
@@ -579,7 +580,7 @@
                   // Store in cache
                   decodeMetaData();
                   //System.out.println("  cache! state=" + state);
-                  termsCache.put(new FieldAndTerm(fieldTerm), (BlockTermState) state.clone());
+                  termsCache.put(new FieldAndTerm(fieldTerm), state.clone());
                 }
 
                 return SeekStatus.FOUND;
Index: lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingMetaData.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingMetaData.java	(revision 0)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingMetaData.java	(working copy)
@@ -0,0 +1,62 @@
+package org.apache.lucene.codecs.pulsing;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.lucene.codecs.BlockTermState;
+import org.apache.lucene.codecs.TermMetaData;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.ArrayUtil;
+
+final class PulsingMetaData extends TermMetaData {
+  byte[] postings;
+  int postingsSize;                     // -1 if this term was not inlined
+  BlockTermState wrapped;
+
+  ByteArrayDataInput inlinedBytesReader;
+  byte[] inlinedBytes;
+
+  public PulsingMetaData() {
+  }
+
+  public PulsingMetaData(TermMetaData wrappedMeta) {
+    this.wrapped = new BlockTermState();
+    this.wrapped.meta = wrappedMeta;
+  }
+
+  @Override
+  public PulsingMetaData clone() {
+    PulsingMetaData clone = (PulsingMetaData) super.clone();
+    if (postingsSize != -1) {
+      clone.postings = new byte[postingsSize];
+      System.arraycopy(postings, 0, clone.postings, 0, postingsSize);
+    } else {
+      assert wrapped != null;
+      clone.wrapped = wrapped.clone();
+    }
+    return clone;
+  }
+
+  @Override
+  public void copyFrom(TermMetaData _other) {
+    super.copyFrom(_other);
+    PulsingMetaData other = (PulsingMetaData) _other;
+    postingsSize = other.postingsSize;
+    if (other.postingsSize != -1) {
+      if (postings == null || postings.length < other.postingsSize) {
+        postings = new byte[ArrayUtil.oversize(other.postingsSize, 1)];
+      }
+      System.arraycopy(other.postings, 0, postings, 0, other.postingsSize);
+    } else {
+      wrapped.copyFrom(other.wrapped);
+    }
+
+    // NOTE: we do not copy the
+    // inlinedBytes/inlinedBytesReader; these are only
+    // stored on the "primary" TermState.  They are
+    // "transient" to cloned term states.
+  }
+}
Index: lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java	(revision 1492635)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/pulsing/PulsingPostingsReader.java	(working copy)
@@ -29,6 +29,7 @@
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.TermState;
+import org.apache.lucene.codecs.TermMetaData;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.ArrayUtil;
@@ -64,88 +65,35 @@
     wrappedPostingsReader.init(termsIn);
   }
 
-  private static class PulsingTermState extends BlockTermState {
-    private byte[] postings;
-    private int postingsSize;                     // -1 if this term was not inlined
-    private BlockTermState wrappedTermState;
-
-    ByteArrayDataInput inlinedBytesReader;
-    private byte[] inlinedBytes;
-
-    @Override
-    public PulsingTermState clone() {
-      PulsingTermState clone;
-      clone = (PulsingTermState) super.clone();
-      if (postingsSize != -1) {
-        clone.postings = new byte[postingsSize];
-        System.arraycopy(postings, 0, clone.postings, 0, postingsSize);
-      } else {
-        assert wrappedTermState != null;
-        clone.wrappedTermState = (BlockTermState) wrappedTermState.clone();
-      }
-      return clone;
-    }
-
-    @Override
-    public void copyFrom(TermState _other) {
-      super.copyFrom(_other);
-      PulsingTermState other = (PulsingTermState) _other;
-      postingsSize = other.postingsSize;
-      if (other.postingsSize != -1) {
-        if (postings == null || postings.length < other.postingsSize) {
-          postings = new byte[ArrayUtil.oversize(other.postingsSize, 1)];
-        }
-        System.arraycopy(other.postings, 0, postings, 0, other.postingsSize);
-      } else {
-        wrappedTermState.copyFrom(other.wrappedTermState);
-      }
-
-      // NOTE: we do not copy the
-      // inlinedBytes/inlinedBytesReader; these are only
-      // stored on the "primary" TermState.  They are
-      // "transient" to cloned term states.
-    }
-
-    @Override
-    public String toString() {
-      if (postingsSize == -1) {
-        return "PulsingTermState: not inlined: wrapped=" + wrappedTermState;
-      } else {
-        return "PulsingTermState: inlined size=" + postingsSize + " " + super.toString();
-      }
-    }
-  }
-
   @Override
-  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState _termState) throws IOException {
+  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState termState) throws IOException {
     //System.out.println("PR.readTermsBlock state=" + _termState);
-    final PulsingTermState termState = (PulsingTermState) _termState;
-    if (termState.inlinedBytes == null) {
-      termState.inlinedBytes = new byte[128];
-      termState.inlinedBytesReader = new ByteArrayDataInput();
+    final PulsingMetaData meta = (PulsingMetaData) termState.meta;
+    if (meta.inlinedBytes == null) {
+      meta.inlinedBytes = new byte[128];
+      meta.inlinedBytesReader = new ByteArrayDataInput();
     }
     int len = termsIn.readVInt();
     //System.out.println("  len=" + len + " fp=" + termsIn.getFilePointer());
-    if (termState.inlinedBytes.length < len) {
-      termState.inlinedBytes = new byte[ArrayUtil.oversize(len, 1)];
+    if (meta.inlinedBytes.length < len) {
+      meta.inlinedBytes = new byte[ArrayUtil.oversize(len, 1)];
     }
-    termsIn.readBytes(termState.inlinedBytes, 0, len);
-    termState.inlinedBytesReader.reset(termState.inlinedBytes);
-    termState.wrappedTermState.termBlockOrd = 0;
-    wrappedPostingsReader.readTermsBlock(termsIn, fieldInfo, termState.wrappedTermState);
+    termsIn.readBytes(meta.inlinedBytes, 0, len);
+    meta.inlinedBytesReader.reset(meta.inlinedBytes);
+    meta.wrapped.termBlockOrd = 0;
+    wrappedPostingsReader.readTermsBlock(termsIn, fieldInfo, meta.wrapped);
   }
 
   @Override
-  public BlockTermState newTermState() throws IOException {
-    PulsingTermState state = new PulsingTermState();
-    state.wrappedTermState = wrappedPostingsReader.newTermState();
-    return state;
+  public TermMetaData newMetaData(FieldInfo info) throws IOException {
+    TermMetaData wrapped = wrappedPostingsReader.newMetaData(info);
+    return new PulsingMetaData(wrapped);
   }
 
   @Override
-  public void nextTerm(FieldInfo fieldInfo, BlockTermState _termState) throws IOException {
+  public void nextTerm(FieldInfo fieldInfo, BlockTermState termState) throws IOException {
     //System.out.println("PR nextTerm");
-    PulsingTermState termState = (PulsingTermState) _termState;
+    PulsingMetaData meta = (PulsingMetaData) termState.meta;
 
     // if we have positions, its total TF, otherwise its computed based on docFreq.
     long count = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 ? termState.totalTermFreq : termState.docFreq;
@@ -156,31 +104,31 @@
       // Inlined into terms dict -- just read the byte[] blob in,
       // but don't decode it now (we only decode when a DocsEnum
       // or D&PEnum is pulled):
-      termState.postingsSize = termState.inlinedBytesReader.readVInt();
-      if (termState.postings == null || termState.postings.length < termState.postingsSize) {
-        termState.postings = new byte[ArrayUtil.oversize(termState.postingsSize, 1)];
+      meta.postingsSize = meta.inlinedBytesReader.readVInt();
+      if (meta.postings == null || meta.postings.length < meta.postingsSize) {
+        meta.postings = new byte[ArrayUtil.oversize(meta.postingsSize, 1)];
       }
       // TODO: sort of silly to copy from one big byte[]
       // (the blob holding all inlined terms' blobs for
       // current term block) into another byte[] (just the
       // blob for this term)...
-      termState.inlinedBytesReader.readBytes(termState.postings, 0, termState.postingsSize);
-      //System.out.println("  inlined bytes=" + termState.postingsSize);
+      meta.inlinedBytesReader.readBytes(meta.postings, 0, meta.postingsSize);
+      //System.out.println("  inlined bytes=" + meta.postingsSize);
     } else {
       //System.out.println("  not inlined");
-      termState.postingsSize = -1;
+      meta.postingsSize = -1;
       // TODO: should we do full copyFrom?  much heavier...?
-      termState.wrappedTermState.docFreq = termState.docFreq;
-      termState.wrappedTermState.totalTermFreq = termState.totalTermFreq;
-      wrappedPostingsReader.nextTerm(fieldInfo, termState.wrappedTermState);
-      termState.wrappedTermState.termBlockOrd++;
+      meta.wrapped.docFreq = termState.docFreq;
+      meta.wrapped.totalTermFreq = termState.totalTermFreq;
+      wrappedPostingsReader.nextTerm(fieldInfo, meta.wrapped);
+      meta.wrapped.termBlockOrd++;
     }
   }
 
   @Override
-  public DocsEnum docs(FieldInfo field, BlockTermState _termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
-    PulsingTermState termState = (PulsingTermState) _termState;
-    if (termState.postingsSize != -1) {
+  public DocsEnum docs(FieldInfo field, BlockTermState termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
+    PulsingMetaData meta = (PulsingMetaData) termState.meta;
+    if (meta.postingsSize != -1) {
       PulsingDocsEnum postings;
       if (reuse instanceof PulsingDocsEnum) {
         postings = (PulsingDocsEnum) reuse;
@@ -202,22 +150,22 @@
       return postings.reset(liveDocs, termState);
     } else {
       if (reuse instanceof PulsingDocsEnum) {
-        DocsEnum wrapped = wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, getOther(reuse), flags);
+        DocsEnum wrapped = wrappedPostingsReader.docs(field, meta.wrapped, liveDocs, getOther(reuse), flags);
         setOther(wrapped, reuse); // wrapped.other = reuse
         return wrapped;
       } else {
-        return wrappedPostingsReader.docs(field, termState.wrappedTermState, liveDocs, reuse, flags);
+        return wrappedPostingsReader.docs(field, meta.wrapped, liveDocs, reuse, flags);
       }
     }
   }
 
   @Override
-  public DocsAndPositionsEnum docsAndPositions(FieldInfo field, BlockTermState _termState, Bits liveDocs, DocsAndPositionsEnum reuse,
+  public DocsAndPositionsEnum docsAndPositions(FieldInfo field, BlockTermState termState, Bits liveDocs, DocsAndPositionsEnum reuse,
                                                int flags) throws IOException {
 
-    final PulsingTermState termState = (PulsingTermState) _termState;
+    final PulsingMetaData meta = (PulsingMetaData) termState.meta;
 
-    if (termState.postingsSize != -1) {
+    if (meta.postingsSize != -1) {
       PulsingDocsAndPositionsEnum postings;
       if (reuse instanceof PulsingDocsAndPositionsEnum) {
         postings = (PulsingDocsAndPositionsEnum) reuse;
@@ -239,12 +187,12 @@
       return postings.reset(liveDocs, termState);
     } else {
       if (reuse instanceof PulsingDocsAndPositionsEnum) {
-        DocsAndPositionsEnum wrapped = wrappedPostingsReader.docsAndPositions(field, termState.wrappedTermState, liveDocs, (DocsAndPositionsEnum) getOther(reuse),
+        DocsAndPositionsEnum wrapped = wrappedPostingsReader.docsAndPositions(field, meta.wrapped, liveDocs, (DocsAndPositionsEnum) getOther(reuse),
                                                                               flags);
         setOther(wrapped, reuse); // wrapped.other = reuse
         return wrapped;
       } else {
-        return wrappedPostingsReader.docsAndPositions(field, termState.wrappedTermState, liveDocs, reuse, flags);
+        return wrappedPostingsReader.docsAndPositions(field, meta.wrapped, liveDocs, reuse, flags);
       }
     }
   }
@@ -268,19 +216,20 @@
       storeOffsets = indexOptions.compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
     }
 
-    public PulsingDocsEnum reset(Bits liveDocs, PulsingTermState termState) {
+    public PulsingDocsEnum reset(Bits liveDocs, BlockTermState termState) {
       //System.out.println("PR docsEnum termState=" + termState + " docFreq=" + termState.docFreq);
-      assert termState.postingsSize != -1;
+      PulsingMetaData meta = (PulsingMetaData)termState.meta;
+      assert meta.postingsSize != -1;
 
-      // Must make a copy of termState's byte[] so that if
+      // Must make a copy of meta's byte[] so that if
       // app does TermsEnum.next(), this DocsEnum is not affected
       if (postingsBytes == null) {
-        postingsBytes = new byte[termState.postingsSize];
-      } else if (postingsBytes.length < termState.postingsSize) {
-        postingsBytes = ArrayUtil.grow(postingsBytes, termState.postingsSize);
+        postingsBytes = new byte[meta.postingsSize];
+      } else if (postingsBytes.length < meta.postingsSize) {
+        postingsBytes = ArrayUtil.grow(postingsBytes, meta.postingsSize);
       }
-      System.arraycopy(termState.postings, 0, postingsBytes, 0, termState.postingsSize);
-      postings.reset(postingsBytes, 0, termState.postingsSize);
+      System.arraycopy(meta.postings, 0, postingsBytes, 0, meta.postingsSize);
+      postings.reset(postingsBytes, 0, meta.postingsSize);
       docID = -1;
       accum = 0;
       freq = 1;
@@ -404,15 +353,16 @@
       return indexOptions == fieldInfo.getIndexOptions() && storePayloads == fieldInfo.hasPayloads();
     }
 
-    public PulsingDocsAndPositionsEnum reset(Bits liveDocs, PulsingTermState termState) {
-      assert termState.postingsSize != -1;
+    public PulsingDocsAndPositionsEnum reset(Bits liveDocs, BlockTermState termState) {
+      PulsingMetaData meta = (PulsingMetaData) termState.meta;
+      assert meta.postingsSize != -1;
       if (postingsBytes == null) {
-        postingsBytes = new byte[termState.postingsSize];
-      } else if (postingsBytes.length < termState.postingsSize) {
-        postingsBytes = ArrayUtil.grow(postingsBytes, termState.postingsSize);
+        postingsBytes = new byte[meta.postingsSize];
+      } else if (postingsBytes.length < meta.postingsSize) {
+        postingsBytes = ArrayUtil.grow(postingsBytes, meta.postingsSize);
       }
-      System.arraycopy(termState.postings, 0, postingsBytes, 0, termState.postingsSize);
-      postings.reset(postingsBytes, 0, termState.postingsSize);
+      System.arraycopy(meta.postings, 0, postingsBytes, 0, meta.postingsSize);
+      postings.reset(postingsBytes, 0, meta.postingsSize);
       this.liveDocs = liveDocs;
       payloadLength = 0;
       posPending = 0;
Index: lucene/codecs/src/java/org/apache/lucene/codecs/sep/SepMetaData.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/sep/SepMetaData.java	(revision 0)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/sep/SepMetaData.java	(working copy)
@@ -0,0 +1,65 @@
+package org.apache.lucene.codecs.sep;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.lucene.codecs.BlockTermState;
+import org.apache.lucene.codecs.TermMetaData;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.util.IOUtils;
+
+// nocommit: not actually implemented !! This file is just written
+// to pass compilation. We should refactor pulsing codec 
+// separately first, before changing to this.
+final class SepMetaData extends TermMetaData {
+  IntIndexInput.Index docIndex;
+  IntIndexInput.Index freqIndex;
+  IntIndexInput.Index posIndex;
+  long payloadFP;
+  long skipFP;
+
+  byte[] bytes;
+  ByteArrayDataInput bytesReader;
+
+  public SepMetaData() {
+  }
+  @Override
+  public SepMetaData clone() {
+    SepMetaData other = new SepMetaData();
+    other.copyFrom(this);
+    return other;
+  }
+  @Override
+  public void copyFrom(TermMetaData _other) {
+    SepMetaData other = (SepMetaData) _other;
+    if (docIndex == null) {
+      docIndex = other.docIndex.clone();
+    } else {
+      docIndex.copyFrom(other.docIndex);
+    }
+    if (other.freqIndex != null) {
+      if (freqIndex == null) {
+        freqIndex = other.freqIndex.clone();
+      } else {
+        freqIndex.copyFrom(other.freqIndex);
+      }
+    } else {
+      freqIndex = null;
+    }
+    if (other.posIndex != null) {
+      if (posIndex == null) {
+        posIndex = other.posIndex.clone();
+      } else {
+        posIndex.copyFrom(other.posIndex);
+      }
+    } else {
+      posIndex = null;
+    }
+    payloadFP = other.payloadFP;
+    skipFP = other.skipFP;
+  }
+}
+
+
Index: lucene/codecs/src/java/org/apache/lucene/codecs/sep/SepPostingsReader.java
===================================================================
--- lucene/codecs/src/java/org/apache/lucene/codecs/sep/SepPostingsReader.java	(revision 1492635)
+++ lucene/codecs/src/java/org/apache/lucene/codecs/sep/SepPostingsReader.java	(working copy)
@@ -22,6 +22,7 @@
 import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.PostingsReaderBase;
+import org.apache.lucene.codecs.TermMetaData;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
@@ -105,139 +106,76 @@
     IOUtils.close(freqIn, docIn, skipIn, posIn, payloadIn);
   }
 
-  private static final class SepTermState extends BlockTermState {
-    // We store only the seek point to the docs file because
-    // the rest of the info (freqIndex, posIndex, etc.) is
-    // stored in the docs file:
-    IntIndexInput.Index docIndex;
-    IntIndexInput.Index posIndex;
-    IntIndexInput.Index freqIndex;
-    long payloadFP;
-    long skipFP;
 
-    // Only used for "primary" term state; these are never
-    // copied on clone:
-    
-    // TODO: these should somehow be stored per-TermsEnum
-    // not per TermState; maybe somehow the terms dict
-    // should load/manage the byte[]/DataReader for us?
-    byte[] bytes;
-    ByteArrayDataInput bytesReader;
-
-    @Override
-    public SepTermState clone() {
-      SepTermState other = new SepTermState();
-      other.copyFrom(this);
-      return other;
-    }
-
-    @Override
-    public void copyFrom(TermState _other) {
-      super.copyFrom(_other);
-      SepTermState other = (SepTermState) _other;
-      if (docIndex == null) {
-        docIndex = other.docIndex.clone();
-      } else {
-        docIndex.copyFrom(other.docIndex);
-      }
-      if (other.freqIndex != null) {
-        if (freqIndex == null) {
-          freqIndex = other.freqIndex.clone();
-        } else {
-          freqIndex.copyFrom(other.freqIndex);
-        }
-      } else {
-        freqIndex = null;
-      }
-      if (other.posIndex != null) {
-        if (posIndex == null) {
-          posIndex = other.posIndex.clone();
-        } else {
-          posIndex.copyFrom(other.posIndex);
-        }
-      } else {
-        posIndex = null;
-      }
-      payloadFP = other.payloadFP;
-      skipFP = other.skipFP;
-    }
-
-    @Override
-    public String toString() {
-      return super.toString() + " docIndex=" + docIndex + " freqIndex=" + freqIndex + " posIndex=" + posIndex + " payloadFP=" + payloadFP + " skipFP=" + skipFP;
-    }
-  }
-
   @Override
-  public BlockTermState newTermState() throws IOException {
-    final SepTermState state = new SepTermState();
-    state.docIndex = docIn.index();
+  public TermMetaData newMetaData(FieldInfo info) throws IOException {
+    final SepMetaData meta = new SepMetaData();
+    meta.docIndex = docIn.index();
     if (freqIn != null) {
-      state.freqIndex = freqIn.index();
+      meta.freqIndex = freqIn.index();
     }
     if (posIn != null) {
-      state.posIndex = posIn.index();
+      meta.posIndex = posIn.index();
     }
-    return state;
+    return meta;
   }
 
   @Override
-  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState _termState) throws IOException {
-    final SepTermState termState = (SepTermState) _termState;
+  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState termState) throws IOException {
+    final SepMetaData meta = (SepMetaData) termState.meta;
     //System.out.println("SEPR: readTermsBlock termsIn.fp=" + termsIn.getFilePointer());
     final int len = termsIn.readVInt();
     //System.out.println("  numBytes=" + len);
-    if (termState.bytes == null) {
-      termState.bytes = new byte[ArrayUtil.oversize(len, 1)];
-      termState.bytesReader = new ByteArrayDataInput(termState.bytes);
-    } else if (termState.bytes.length < len) {
-      termState.bytes = new byte[ArrayUtil.oversize(len, 1)];
+    if (meta.bytes == null) {
+      meta.bytes = new byte[ArrayUtil.oversize(len, 1)];
+      meta.bytesReader = new ByteArrayDataInput(meta.bytes);
+    } else if (meta.bytes.length < len) {
+      meta.bytes = new byte[ArrayUtil.oversize(len, 1)];
     }
-    termState.bytesReader.reset(termState.bytes, 0, len);
-    termsIn.readBytes(termState.bytes, 0, len);
+    meta.bytesReader.reset(meta.bytes, 0, len);
+    termsIn.readBytes(meta.bytes, 0, len);
   }
 
   @Override
-  public void nextTerm(FieldInfo fieldInfo, BlockTermState _termState) throws IOException {
-    final SepTermState termState = (SepTermState) _termState;
+  public void nextTerm(FieldInfo fieldInfo, BlockTermState termState) throws IOException {
+    final SepMetaData meta = (SepMetaData) termState.meta;
     final boolean isFirstTerm = termState.termBlockOrd == 0;
-    //System.out.println("SEPR.nextTerm termCount=" + termState.termBlockOrd + " isFirstTerm=" + isFirstTerm + " bytesReader.pos=" + termState.bytesReader.getPosition());
+    //System.out.println("SEPR.nextTerm termCount=" + termState.termBlockOrd + " isFirstTerm=" + isFirstTerm + " bytesReader.pos=" + meta.bytesReader.getPosition());
     //System.out.println("  docFreq=" + termState.docFreq);
-    termState.docIndex.read(termState.bytesReader, isFirstTerm);
-    //System.out.println("  docIndex=" + termState.docIndex);
+    meta.docIndex.read(meta.bytesReader, isFirstTerm);
+    //System.out.println("  docIndex=" + meta.docIndex);
     if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
-      termState.freqIndex.read(termState.bytesReader, isFirstTerm);
+      meta.freqIndex.read(meta.bytesReader, isFirstTerm);
       if (fieldInfo.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
-        //System.out.println("  freqIndex=" + termState.freqIndex);
-        termState.posIndex.read(termState.bytesReader, isFirstTerm);
-        //System.out.println("  posIndex=" + termState.posIndex);
+        //System.out.println("  freqIndex=" + meta.freqIndex);
+        meta.posIndex.read(meta.bytesReader, isFirstTerm);
+        //System.out.println("  posIndex=" + meta.posIndex);
         if (fieldInfo.hasPayloads()) {
           if (isFirstTerm) {
-            termState.payloadFP = termState.bytesReader.readVLong();
+            meta.payloadFP = meta.bytesReader.readVLong();
           } else {
-            termState.payloadFP += termState.bytesReader.readVLong();
+            meta.payloadFP += meta.bytesReader.readVLong();
           }
-          //System.out.println("  payloadFP=" + termState.payloadFP);
+          //System.out.println("  payloadFP=" + meta.payloadFP);
         }
       }
     }
 
     if (termState.docFreq >= skipMinimum) {
-      //System.out.println("   readSkip @ " + termState.bytesReader.getPosition());
+      //System.out.println("   readSkip @ " + meta.bytesReader.getPosition());
       if (isFirstTerm) {
-        termState.skipFP = termState.bytesReader.readVLong();
+        meta.skipFP = meta.bytesReader.readVLong();
       } else {
-        termState.skipFP += termState.bytesReader.readVLong();
+        meta.skipFP += meta.bytesReader.readVLong();
       }
-      //System.out.println("  skipFP=" + termState.skipFP);
+      //System.out.println("  skipFP=" + meta.skipFP);
     } else if (isFirstTerm) {
-      termState.skipFP = 0;
+      meta.skipFP = 0;
     }
   }
 
   @Override
-  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState _termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
-    final SepTermState termState = (SepTermState) _termState;
+  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
     SepDocsEnum docsEnum;
     if (reuse == null || !(reuse instanceof SepDocsEnum)) {
       docsEnum = new SepDocsEnum();
@@ -255,12 +193,11 @@
   }
 
   @Override
-  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState _termState, Bits liveDocs,
+  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs,
                                                DocsAndPositionsEnum reuse, int flags)
     throws IOException {
 
     assert fieldInfo.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
-    final SepTermState termState = (SepTermState) _termState;
     SepDocsAndPositionsEnum postingsEnum;
     if (reuse == null || !(reuse instanceof SepDocsAndPositionsEnum)) {
       postingsEnum = new SepDocsAndPositionsEnum();
@@ -322,7 +259,8 @@
       }
     }
 
-    SepDocsEnum init(FieldInfo fieldInfo, SepTermState termState, Bits liveDocs) throws IOException {
+    SepDocsEnum init(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs) throws IOException {
+      SepMetaData meta = (SepMetaData) termState.meta;
       this.liveDocs = liveDocs;
       this.indexOptions = fieldInfo.getIndexOptions();
       omitTF = indexOptions == IndexOptions.DOCS_ONLY;
@@ -330,17 +268,17 @@
 
       // TODO: can't we only do this if consumer
       // skipped consuming the previous docs?
-      docIndex.copyFrom(termState.docIndex);
+      docIndex.copyFrom(meta.docIndex);
       docIndex.seek(docReader);
 
       if (!omitTF) {
-        freqIndex.copyFrom(termState.freqIndex);
+        freqIndex.copyFrom(meta.freqIndex);
         freqIndex.seek(freqReader);
       }
 
       docFreq = termState.docFreq;
       // NOTE: unused if docFreq < skipMinimum:
-      skipFP = termState.skipFP;
+      skipFP = meta.skipFP;
       count = 0;
       doc = -1;
       accum = 0;
@@ -492,28 +430,29 @@
       payloadIn = SepPostingsReader.this.payloadIn.clone();
     }
 
-    SepDocsAndPositionsEnum init(FieldInfo fieldInfo, SepTermState termState, Bits liveDocs) throws IOException {
+    SepDocsAndPositionsEnum init(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs) throws IOException {
+      SepMetaData meta = (SepMetaData) termState.meta;
       this.liveDocs = liveDocs;
       storePayloads = fieldInfo.hasPayloads();
       //System.out.println("Sep D&P init");
 
       // TODO: can't we only do this if consumer
       // skipped consuming the previous docs?
-      docIndex.copyFrom(termState.docIndex);
+      docIndex.copyFrom(meta.docIndex);
       docIndex.seek(docReader);
       //System.out.println("  docIndex=" + docIndex);
 
-      freqIndex.copyFrom(termState.freqIndex);
+      freqIndex.copyFrom(meta.freqIndex);
       freqIndex.seek(freqReader);
       //System.out.println("  freqIndex=" + freqIndex);
 
-      posIndex.copyFrom(termState.posIndex);
+      posIndex.copyFrom(meta.posIndex);
       //System.out.println("  posIndex=" + posIndex);
       posSeekPending = true;
       payloadPending = false;
 
-      payloadFP = termState.payloadFP;
-      skipFP = termState.skipFP;
+      payloadFP = meta.payloadFP;
+      skipFP = meta.skipFP;
       //System.out.println("  skipFP=" + skipFP);
 
       docFreq = termState.docFreq;
Index: lucene/core/src/java/org/apache/lucene/codecs/BlockTermState.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/BlockTermState.java	(revision 1492635)
+++ lucene/core/src/java/org/apache/lucene/codecs/BlockTermState.java	(working copy)
@@ -34,14 +34,24 @@
   /** the term's ord in the current block */
   public int termBlockOrd;
   /** fp into the terms dict primary file (_X.tim) that holds this term */
+  // TODO: only used in BlockTermDict
   public long blockFilePointer;
 
+  public TermMetaData meta;
+
   /** Sole constructor. (For invocation by subclass 
    *  constructors, typically implicit.) */
-  protected BlockTermState() {
+  public BlockTermState() {
   }
 
   @Override
+  public BlockTermState clone() {
+    BlockTermState other = new BlockTermState();
+    other.copyFrom(this);
+    return other;
+  }
+
+  @Override
   public void copyFrom(TermState _other) {
     assert _other instanceof BlockTermState : "can not copy from " + _other.getClass().getName();
     BlockTermState other = (BlockTermState) _other;
@@ -50,14 +60,11 @@
     totalTermFreq = other.totalTermFreq;
     termBlockOrd = other.termBlockOrd;
     blockFilePointer = other.blockFilePointer;
-
-    // NOTE: don't copy blockTermCount;
-    // it's "transient": used only by the "primary"
-    // termState, and regenerated on seek by TermState
+    meta = other.meta == null ? null : other.meta.clone();
   }
 
   @Override
   public String toString() {
-    return "docFreq=" + docFreq + " totalTermFreq=" + totalTermFreq + " termBlockOrd=" + termBlockOrd + " blockFP=" + blockFilePointer;
+    return "docFreq=" + docFreq + " totalTermFreq=" + totalTermFreq + " termBlockOrd=" + termBlockOrd + " blockFP=" + blockFilePointer + " " + meta;
   }
 }
Index: lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java	(revision 1492635)
+++ lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsReader.java	(working copy)
@@ -27,6 +27,7 @@
 import java.util.Locale;
 import java.util.TreeMap;
 
+import org.apache.lucene.codecs.TermMetaData;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
@@ -92,6 +93,7 @@
   private final IndexInput in;
 
   //private static final boolean DEBUG = BlockTreeTermsWriter.DEBUG;
+  private static final boolean DEBUG = false;
 
   // Reads the terms dict entries, to gather state to
   // produce DocsEnum on demand
@@ -468,9 +470,9 @@
       this.docCount = docCount;
       this.indexStartFP = indexStartFP;
       this.rootCode = rootCode;
-      // if (DEBUG) {
-      //   System.out.println("BTTR: seg=" + segment + " field=" + fieldInfo.name + " rootBlockCode=" + rootCode + " divisor=" + indexDivisor);
-      // }
+      //if (DEBUG) {
+      //System.out.println("BTTR: seg=" + segment + " field=" + fieldInfo.name + " rootBlockCode=" + rootCode + " divisor=" + indexDivisor);
+      //}
 
       rootBlockFP = (new ByteArrayDataInput(rootCode.bytes, rootCode.offset, rootCode.length)).readVLong() >>> BlockTreeTermsWriter.OUTPUT_FLAGS_NUM_BITS;
 
@@ -579,7 +581,7 @@
         long lastSubFP;
 
         // State in automaton
-        int state;
+        int fsaState;
 
         int metaDataUpto;
 
@@ -617,7 +619,7 @@
 
         FST.Arc<BytesRef> arc;
 
-        final BlockTermState termState;
+        final BlockTermState state;
 
         // Cumulative output so far
         BytesRef outputPrefix;
@@ -627,8 +629,9 @@
 
         public Frame(int ord) throws IOException {
           this.ord = ord;
-          termState = postingsReader.newTermState();
-          termState.totalTermFreq = -1;
+          this.state = new BlockTermState();
+          this.state.totalTermFreq = -1;
+          this.state.meta = postingsReader.newMetaData(fieldInfo);
         }
 
         void loadNextFloorBlock() throws IOException {
@@ -638,22 +641,22 @@
           do {
             fp = fpOrig + (floorDataReader.readVLong() >>> 1);
             numFollowFloorBlocks--;
-            // if (DEBUG) System.out.println("    skip floor block2!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[transitionIndex].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
+            //if (DEBUG) System.out.println("    skip floor block2!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[transitionIndex].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
             if (numFollowFloorBlocks != 0) {
               nextFloorLabel = floorDataReader.readByte() & 0xff;
             } else {
               nextFloorLabel = 256;
             }
-            // if (DEBUG) System.out.println("    nextFloorLabel=" + (char) nextFloorLabel);
+            //if (DEBUG) System.out.println("    nextFloorLabel=" + (char) nextFloorLabel);
           } while (numFollowFloorBlocks != 0 && nextFloorLabel <= transitions[transitionIndex].getMin());
 
           load(null);
         }
 
-        public void setState(int state) {
-          this.state = state;
+        public void setState(int fsaState) {
+          this.fsaState = fsaState;
           transitionIndex = 0;
-          transitions = compiledAutomaton.sortedTransitions[state];
+          transitions = compiledAutomaton.sortedTransitions[fsaState];
           if (transitions.length != 0) {
             curTransitionMax = transitions[0].getMax();
           } else {
@@ -663,7 +666,7 @@
 
         void load(BytesRef frameIndexData) throws IOException {
 
-          // if (DEBUG) System.out.println("    load fp=" + fp + " fpOrig=" + fpOrig + " frameIndexData=" + frameIndexData + " trans=" + (transitions.length != 0 ? transitions[0] : "n/a" + " state=" + state));
+          //if (DEBUG) System.out.println("    load fp=" + fp + " fpOrig=" + fpOrig + " frameIndexData=" + frameIndexData + " trans=" + (transitions.length != 0 ? transitions[0] : "n/a" + " state=" + state));
 
           if (frameIndexData != null && transitions.length != 0) {
             // Floor frame
@@ -678,16 +681,16 @@
             if ((code & BlockTreeTermsWriter.OUTPUT_FLAG_IS_FLOOR) != 0) {
               numFollowFloorBlocks = floorDataReader.readVInt();
               nextFloorLabel = floorDataReader.readByte() & 0xff;
-              // if (DEBUG) System.out.println("    numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + nextFloorLabel);
+              //if (DEBUG) System.out.println("    numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + nextFloorLabel);
 
               // If current state is accept, we must process
               // first block in case it has empty suffix:
-              if (!runAutomaton.isAccept(state)) {
+              if (!runAutomaton.isAccept(fsaState)) {
                 // Maybe skip floor blocks:
                 while (numFollowFloorBlocks != 0 && nextFloorLabel <= transitions[0].getMin()) {
                   fp = fpOrig + (floorDataReader.readVLong() >>> 1);
                   numFollowFloorBlocks--;
-                  // if (DEBUG) System.out.println("    skip floor block!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[0].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
+                  //if (DEBUG) System.out.println("    skip floor block!  nextFloorLabel=" + (char) nextFloorLabel + " vs target=" + (char) transitions[0].getMin() + " newFP=" + fp + " numFollowFloorBlocks=" + numFollowFloorBlocks);
                   if (numFollowFloorBlocks != 0) {
                     nextFloorLabel = floorDataReader.readByte() & 0xff;
                   } else {
@@ -708,7 +711,7 @@
           code = in.readVInt();
           isLeafBlock = (code & 1) != 0;
           int numBytes = code >>> 1;
-          // if (DEBUG) System.out.println("      entCount=" + entCount + " lastInFloor?=" + isLastInFloor + " leafBlock?=" + isLeafBlock + " numSuffixBytes=" + numBytes);
+          //if (DEBUG) System.out.println("      entCount=" + entCount + " lastInFloor?=" + isLastInFloor + " leafBlock?=" + isLeafBlock + " numSuffixBytes=" + numBytes);
           if (suffixBytes.length < numBytes) {
             suffixBytes = new byte[ArrayUtil.oversize(numBytes, 1)];
           }
@@ -724,10 +727,10 @@
           statsReader.reset(statBytes, 0, numBytes);
           metaDataUpto = 0;
 
-          termState.termBlockOrd = 0;
+          state.termBlockOrd = 0;
           nextEnt = 0;
           
-          postingsReader.readTermsBlock(in, fieldInfo, termState);
+          postingsReader.readTermsBlock(in, fieldInfo, state);
 
           if (!isLastInFloor) {
             // Sub-blocks of a single floor block are always
@@ -763,7 +766,7 @@
           suffixesReader.skipBytes(suffix);
           if ((code & 1) == 0) {
             // A normal term
-            termState.termBlockOrd++;
+            state.termBlockOrd++;
             return false;
           } else {
             // A sub-block; make sub-FP absolute:
@@ -773,7 +776,7 @@
         }
 
         public int getTermBlockOrd() {
-          return isLeafBlock ? nextEnt : termState.termBlockOrd;
+          return isLeafBlock ? nextEnt : state.termBlockOrd;
         }
 
         public void decodeMetaData() throws IOException {
@@ -784,7 +787,7 @@
 
           // We must set/incr state.termCount because
           // postings impl can look at this
-          termState.termBlockOrd = metaDataUpto;
+          state.termBlockOrd = metaDataUpto;
       
           // TODO: better API would be "jump straight to term=N"???
           while (metaDataUpto < limit) {
@@ -797,16 +800,16 @@
 
             // TODO: if docFreq were bulk decoded we could
             // just skipN here:
-            termState.docFreq = statsReader.readVInt();
+            state.docFreq = statsReader.readVInt();
             //if (DEBUG) System.out.println("    dF=" + state.docFreq);
             if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) {
-              termState.totalTermFreq = termState.docFreq + statsReader.readVLong();
+              state.totalTermFreq = state.docFreq + statsReader.readVLong();
               //if (DEBUG) System.out.println("    totTF=" + state.totalTermFreq);
             }
 
-            postingsReader.nextTerm(fieldInfo, termState);
+            postingsReader.nextTerm(fieldInfo, state);
             metaDataUpto++;
-            termState.termBlockOrd++;
+            state.termBlockOrd++;
           }
         }
       }
@@ -816,9 +819,9 @@
       // TODO: in some cases we can filter by length?  eg
       // regexp foo*bar must be at least length 6 bytes
       public IntersectEnum(CompiledAutomaton compiled, BytesRef startTerm) throws IOException {
-        // if (DEBUG) {
-        //   System.out.println("\nintEnum.init seg=" + segment + " commonSuffix=" + brToString(compiled.commonSuffixRef));
-        // }
+        //if (DEBUG) {
+        //System.out.println("\nintEnum.init seg=" + segment + " commonSuffix=" + brToString(compiled.commonSuffixRef));
+        //}
         runAutomaton = compiled.runAutomaton;
         compiledAutomaton = compiled;
         in = BlockTreeTermsReader.this.in.clone();
@@ -872,9 +875,13 @@
       }
 
       @Override
+      // TODO: currently we fully copy state to the upper level,
+      // i.e. the metadata bytes are also copied — is that necessary?
       public TermState termState() throws IOException {
         currentFrame.decodeMetaData();
-        return currentFrame.termState.clone();
+        TermState ts = currentFrame.state.clone();
+        //if (DEBUG) System.out.println("BTTR.termState seg=" + segment + " state=" + ts);
+        return ts;
       }
 
       private Frame getFrame(int ord) throws IOException {
@@ -908,7 +915,7 @@
         
         f.fp = f.fpOrig = currentFrame.lastSubFP;
         f.prefix = currentFrame.prefix + currentFrame.suffix;
-        // if (DEBUG) System.out.println("    pushFrame state=" + state + " prefix=" + f.prefix);
+        //if (DEBUG) System.out.println("    pushFrame state=" + state + " prefix=" + f.prefix);
         f.setState(state);
 
         // Walk the arc through the index -- we only
@@ -946,20 +953,20 @@
       public int docFreq() throws IOException {
         //if (DEBUG) System.out.println("BTIR.docFreq");
         currentFrame.decodeMetaData();
-        //if (DEBUG) System.out.println("  return " + currentFrame.termState.docFreq);
-        return currentFrame.termState.docFreq;
+        //if (DEBUG) System.out.println("  return " + currentFrame.state.docFreq);
+        return currentFrame.state.docFreq;
       }
 
       @Override
       public long totalTermFreq() throws IOException {
         currentFrame.decodeMetaData();
-        return currentFrame.termState.totalTermFreq;
+        return currentFrame.state.totalTermFreq;
       }
 
       @Override
       public DocsEnum docs(Bits skipDocs, DocsEnum reuse, int flags) throws IOException {
         currentFrame.decodeMetaData();
-        return postingsReader.docs(fieldInfo, currentFrame.termState, skipDocs, reuse, flags);
+        return postingsReader.docs(fieldInfo, currentFrame.state, skipDocs, reuse, flags);
       }
 
       @Override
@@ -970,16 +977,16 @@
         }
 
         currentFrame.decodeMetaData();
-        return postingsReader.docsAndPositions(fieldInfo, currentFrame.termState, skipDocs, reuse, flags);
+        return postingsReader.docsAndPositions(fieldInfo, currentFrame.state, skipDocs, reuse, flags);
       }
 
       private int getState() {
-        int state = currentFrame.state;
+        int fsaState = currentFrame.fsaState;
         for(int idx=0;idx<currentFrame.suffix;idx++) {
-          state = runAutomaton.step(state,  currentFrame.suffixBytes[currentFrame.startBytePos+idx] & 0xff);
-          assert state != -1;
+          fsaState = runAutomaton.step(fsaState,  currentFrame.suffixBytes[currentFrame.startBytePos+idx] & 0xff);
+          assert fsaState != -1;
         }
-        return state;
+        return fsaState;
       }
 
       // NOTE: specialized to only doing the first-time
@@ -1002,7 +1009,7 @@
             final int saveStartBytePos = currentFrame.startBytePos;
             final int saveSuffix = currentFrame.suffix;
             final long saveLastSubFP = currentFrame.lastSubFP;
-            final int saveTermBlockOrd = currentFrame.termState.termBlockOrd;
+            final int saveTermBlockOrd = currentFrame.state.termBlockOrd;
 
             final boolean isSubBlock = currentFrame.next();
 
@@ -1045,7 +1052,7 @@
                 currentFrame.startBytePos = saveStartBytePos;
                 currentFrame.suffix = saveSuffix;
                 currentFrame.suffixesReader.setPosition(savePos);
-                currentFrame.termState.termBlockOrd = saveTermBlockOrd;
+                currentFrame.state.termBlockOrd = saveTermBlockOrd;
                 System.arraycopy(currentFrame.suffixBytes, currentFrame.startBytePos, term.bytes, currentFrame.prefix, currentFrame.suffix);
                 term.length = currentFrame.prefix + currentFrame.suffix;
                 // If the last entry was a block we don't
@@ -1065,9 +1072,9 @@
       public BytesRef next() throws IOException {
 
         // if (DEBUG) {
-        //   System.out.println("\nintEnum.next seg=" + segment);
-        //   System.out.println("  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
-        // }
+        //System.out.println("\nintEnum.next seg=" + segment);
+        //System.out.println("  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
+        //}
 
         nextTerm:
         while(true) {
@@ -1095,8 +1102,8 @@
           //   suffixRef.bytes = currentFrame.suffixBytes;
           //   suffixRef.offset = currentFrame.startBytePos;
           //   suffixRef.length = currentFrame.suffix;
-          //   System.out.println("    " + (isSubBlock ? "sub-block" : "term") + " " + currentFrame.nextEnt + " (of " + currentFrame.entCount + ") suffix=" + brToString(suffixRef));
-          // }
+          //System.out.println("    " + (isSubBlock ? "sub-block" : "term") + " " + currentFrame.nextEnt + " (of " + currentFrame.entCount + ") suffix=" + brToString(suffixRef));
+          //}
 
           if (currentFrame.suffix != 0) {
             final int label = currentFrame.suffixBytes[currentFrame.startBytePos] & 0xff;
@@ -1123,9 +1130,7 @@
             final int termLen = currentFrame.prefix + currentFrame.suffix;
             if (termLen < compiledAutomaton.commonSuffixRef.length) {
               // No match
-              // if (DEBUG) {
-              //   System.out.println("      skip: common suffix length");
-              // }
+              //if (DEBUG) System.out.println("      skip: common suffix length");
               continue nextTerm;
             }
 
@@ -1147,9 +1152,7 @@
               final int termBytesPosEnd = currentFrame.prefix;
               while (termBytesPos < termBytesPosEnd) {
                 if (termBytes[termBytesPos++] != commonSuffixBytes[commonSuffixBytesPos++]) {
-                  // if (DEBUG) {
-                  //   System.out.println("      skip: common suffix mismatch (in prefix)");
-                  // }
+                  //if (DEBUG) System.out.println("      skip: common suffix mismatch (in prefix)");
                   continue nextTerm;
                 }
               }
@@ -1162,9 +1165,7 @@
             final int commonSuffixBytesPosEnd = compiledAutomaton.commonSuffixRef.length;
             while (commonSuffixBytesPos < commonSuffixBytesPosEnd) {
               if (suffixBytes[suffixBytesPos++] != commonSuffixBytes[commonSuffixBytesPos++]) {
-                // if (DEBUG) {
-                //   System.out.println("      skip: common suffix mismatch");
-                // }
+                //if (DEBUG) System.out.println("      skip: common suffix mismatch");
                 continue nextTerm;
               }
             }
@@ -1177,31 +1178,31 @@
           // until the limit
 
           // See if the term prefix matches the automaton:
-          int state = currentFrame.state;
+          int fsaState = currentFrame.fsaState;
           for (int idx=0;idx<currentFrame.suffix;idx++) {
-            state = runAutomaton.step(state,  currentFrame.suffixBytes[currentFrame.startBytePos+idx] & 0xff);
-            if (state == -1) {
+            fsaState = runAutomaton.step(fsaState,  currentFrame.suffixBytes[currentFrame.startBytePos+idx] & 0xff);
+            if (fsaState == -1) {
               // No match
-              //System.out.println("    no s=" + state);
+              //System.out.println("    no s=" + fsaState);
               continue nextTerm;
             } else {
-              //System.out.println("    c s=" + state);
+              //System.out.println("    c s=" + fsaState);
             }
           }
 
           if (isSubBlock) {
             // Match!  Recurse:
-            //if (DEBUG) System.out.println("      sub-block match to state=" + state + "; recurse fp=" + currentFrame.lastSubFP);
+            //if (DEBUG) System.out.println("      sub-block match to state=" + fsaState + "; recurse fp=" + currentFrame.lastSubFP);
             copyTerm();
-            currentFrame = pushFrame(state);
-            //if (DEBUG) System.out.println("\n  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.state + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
-          } else if (runAutomaton.isAccept(state)) {
+            currentFrame = pushFrame(fsaState);
+            //if (DEBUG) System.out.println("\n  frame ord=" + currentFrame.ord + " prefix=" + brToString(new BytesRef(term.bytes, term.offset, currentFrame.prefix)) + " state=" + currentFrame.fsaState + " lastInFloor?=" + currentFrame.isLastInFloor + " fp=" + currentFrame.fp + " trans=" + (currentFrame.transitions.length == 0 ? "n/a" : currentFrame.transitions[currentFrame.transitionIndex]) + " outputPrefix=" + currentFrame.outputPrefix);
+          } else if (runAutomaton.isAccept(fsaState)) {
             copyTerm();
-            //if (DEBUG) System.out.println("      term match to state=" + state + "; return term=" + brToString(term));
+            //if (DEBUG) System.out.println("      term match to state=" + fsaState + "; return term=" + brToString(term));
             assert savedStartTerm == null || term.compareTo(savedStartTerm) > 0: "saveStartTerm=" + savedStartTerm.utf8ToString() + " term=" + term.utf8ToString();
             return term;
           } else {
-            //System.out.println("    no s=" + state);
+            //System.out.println("    no s=" + fsaState);
           }
         }
       }
@@ -1299,12 +1300,12 @@
         //currentFrame = pushFrame(arc, rootCode, 0);
         //currentFrame.loadBlock();
         validIndexPrefix = 0;
-        // if (DEBUG) {
-        //   System.out.println("init frame state " + currentFrame.ord);
+        //if (DEBUG) {
+        //System.out.println("init frame state " + currentFrame.ord);
         //   printSeekState();
-        // }
+        //}
 
-        //System.out.println();
+        // System.out.println();
         // computeBlockStats().print(System.out);
       }
       
@@ -1361,9 +1362,7 @@
               final long lastFP = currentFrame.fpOrig;
               currentFrame = stack[currentFrame.ord-1];
               assert lastFP == currentFrame.lastSubFP;
-              // if (DEBUG) {
-              //   System.out.println("  reset validIndexPrefix=" + validIndexPrefix);
-              // }
+              //if (DEBUG) System.out.println("  reset validIndexPrefix=" + validIndexPrefix);
             }
           }
 
@@ -1464,9 +1463,7 @@
           if (f.prefix > targetBeforeCurrentLength) {
             f.rewind();
           } else {
-            // if (DEBUG) {
-            //   System.out.println("        skip rewind!");
-            // }
+            //if (DEBUG) System.out.println("        skip rewind!");
           }
           assert length == f.prefix;
         } else {
@@ -1475,12 +1472,12 @@
           f.state.termBlockOrd = 0;
           f.fpOrig = f.fp = fp;
           f.lastSubFP = -1;
-          // if (DEBUG) {
+          //if (DEBUG) {
           //   final int sav = term.length;
           //   term.length = length;
-          //   System.out.println("      push new frame ord=" + f.ord + " fp=" + f.fp + " hasTerms=" + f.hasTerms + " isFloor=" + f.isFloor + " pref=" + brToString(term));
+          //System.out.println("      push new frame ord=" + f.ord + " fp=" + f.fp + " hasTerms=" + f.hasTerms + " isFloor=" + f.isFloor + " pref=" + brToString(term));
           //   term.length = sav;
-          // }
+          //}
         }
 
         return f;
@@ -1511,10 +1508,10 @@
 
         assert clearEOF();
 
-        // if (DEBUG) {
-        //   System.out.println("\nBTTR.seekExact seg=" + segment + " target=" + fieldInfo.name + ":" + brToString(target) + " current=" + brToString(term) + " (exists?=" + termExists + ") validIndexPrefix=" + validIndexPrefix);
+        //if (DEBUG) {
+        //System.out.println("\nBTTR.seekExact seg=" + segment + " target=" + fieldInfo.name + ":" + brToString(target) + " current=" + brToString(term) + " (exists?=" + termExists + ") validIndexPrefix=" + validIndexPrefix);
         //   printSeekState();
-        // }
+        //}
 
         FST.Arc<BytesRef> arc;
         int targetUpto;
@@ -1531,9 +1528,7 @@
           // seeks to foobaz, we can re-use the seek state
           // for the first 5 bytes.
 
-          // if (DEBUG) {
-          //   System.out.println("  re-use current seek state validIndexPrefix=" + validIndexPrefix);
-          // }
+          //if (DEBUG) System.out.println("  re-use current seek state validIndexPrefix=" + validIndexPrefix);
 
           arc = arcs[0];
           assert arc.isFinal();
@@ -1553,16 +1548,14 @@
           // First compare up to valid seek frames:
           while (targetUpto < targetLimit) {
             cmp = (term.bytes[targetUpto]&0xFF) - (target.bytes[target.offset + targetUpto]&0xFF);
-            // if (DEBUG) {
-            //   System.out.println("    cycle targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")"   + " arc.output=" + arc.output + " output=" + output);
-            // }
+            //if (DEBUG) System.out.println("    cycle targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")"   + " arc.output=" + arc.output + " output=" + output);
             if (cmp != 0) {
               break;
             }
             arc = arcs[1+targetUpto];
-            //if (arc.label != (target.bytes[target.offset + targetUpto] & 0xFF)) {
+            //if (arc.label != (target.bytes[target.offset + targetUpto] & 0xFF)) {
             //System.out.println("FAIL: arc.label=" + (char) arc.label + " targetLabel=" + (char) (target.bytes[target.offset + targetUpto] & 0xFF));
-            //}
+            //}
             assert arc.label == (target.bytes[target.offset + targetUpto] & 0xFF): "arc.label=" + (char) arc.label + " targetLabel=" + (char) (target.bytes[target.offset + targetUpto] & 0xFF);
             if (arc.output != NO_OUTPUT) {
               output = fstOutputs.add(output, arc.output);
@@ -1583,9 +1576,7 @@
             final int targetLimit2 = Math.min(target.length, term.length);
             while (targetUpto < targetLimit2) {
               cmp = (term.bytes[targetUpto]&0xFF) - (target.bytes[target.offset + targetUpto]&0xFF);
-              // if (DEBUG) {
-              //   System.out.println("    cycle2 targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")");
-              // }
+              //if (DEBUG) System.out.println("    cycle2 targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")");
               if (cmp != 0) {
                 break;
               }
@@ -1602,9 +1593,7 @@
             // Common case: target term is after current
             // term, ie, app is seeking multiple terms
             // in sorted order
-            // if (DEBUG) {
-            //   System.out.println("  target is after current (shares prefixLen=" + targetUpto + "); frame.ord=" + lastFrame.ord);
-            // }
+            //if (DEBUG) System.out.println("  target is after current (shares prefixLen=" + targetUpto + "); frame.ord=" + lastFrame.ord);
             currentFrame = lastFrame;
 
           } else if (cmp > 0) {
@@ -1613,23 +1602,17 @@
             // keep the currentFrame but we must rewind it
             // (so we scan from the start)
             targetBeforeCurrentLength = 0;
-            // if (DEBUG) {
-            //   System.out.println("  target is before current (shares prefixLen=" + targetUpto + "); rewind frame ord=" + lastFrame.ord);
-            // }
+            //if (DEBUG) System.out.println("  target is before current (shares prefixLen=" + targetUpto + "); rewind frame ord=" + lastFrame.ord);
             currentFrame = lastFrame;
             currentFrame.rewind();
           } else {
             // Target is exactly the same as current term
             assert term.length == target.length;
             if (termExists) {
-              // if (DEBUG) {
-              //   System.out.println("  target is same as current; return true");
-              // }
+              //if (DEBUG) System.out.println("  target is same as current; return true");
               return true;
             } else {
-              // if (DEBUG) {
-              //   System.out.println("  target is same as current but term doesn't exist");
-              // }
+              //if (DEBUG) System.out.println("  target is same as current but term doesn't exist");
             }
             //validIndexPrefix = currentFrame.depth;
             //term.length = target.length;
@@ -1645,9 +1628,7 @@
           assert arc.isFinal();
           assert arc.output != null;
 
-          // if (DEBUG) {
-          //   System.out.println("    no seek state; push root frame");
-          // }
+          //if (DEBUG) System.out.println("    no seek state; push root frame");
 
           output = arc.output;
 
@@ -1658,9 +1639,7 @@
           currentFrame = pushFrame(arc, fstOutputs.add(output, arc.nextFinalOutput), 0);
         }
 
-        // if (DEBUG) {
-        //   System.out.println("  start index loop targetUpto=" + targetUpto + " output=" + output + " currentFrame.ord=" + currentFrame.ord + " targetBeforeCurrentLength=" + targetBeforeCurrentLength);
-        // }
+        //if (DEBUG) System.out.println("  start index loop targetUpto=" + targetUpto + " output=" + output + " currentFrame.ord=" + currentFrame.ord + " targetBeforeCurrentLength=" + targetBeforeCurrentLength);
 
         while (targetUpto < target.length) {
 
@@ -1671,9 +1650,7 @@
           if (nextArc == null) {
 
             // Index is exhausted
-            // if (DEBUG) {
-            //   System.out.println("    index: index exhausted label=" + ((char) targetLabel) + " " + toHex(targetLabel));
-            // }
+            //if (DEBUG) System.out.println("    index: index exhausted label=" + ((char) targetLabel) + " " + toHex(targetLabel));
             
             validIndexPrefix = currentFrame.prefix;
             //validIndexPrefix = targetUpto;
@@ -1684,9 +1661,7 @@
               termExists = false;
               term.bytes[targetUpto] = (byte) targetLabel;
               term.length = 1+targetUpto;
-              // if (DEBUG) {
-              //   System.out.println("  FAST NOT_FOUND term=" + brToString(term));
-              // }
+              //if (DEBUG) System.out.println("  FAST NOT_FOUND term=" + brToString(term));
               return false;
             }
 
@@ -1694,14 +1669,10 @@
 
             final SeekStatus result = currentFrame.scanToTerm(target, true);            
             if (result == SeekStatus.FOUND) {
-              // if (DEBUG) {
-              //   System.out.println("  return FOUND term=" + term.utf8ToString() + " " + term);
-              // }
+              //if (DEBUG) System.out.println("  return FOUND term=" + term.utf8ToString() + " " + term);
               return true;
             } else {
-              // if (DEBUG) {
-              //   System.out.println("  got " + result + "; return NOT_FOUND term=" + brToString(term));
-              // }
+              //if (DEBUG) System.out.println("  got " + result + "; return NOT_FOUND term=" + brToString(term));
               return false;
             }
           } else {
@@ -1714,9 +1685,7 @@
               output = fstOutputs.add(output, arc.output);
             }
 
-            // if (DEBUG) {
-            //   System.out.println("    index: follow label=" + toHex(target.bytes[target.offset + targetUpto]&0xff) + " arc.output=" + arc.output + " arc.nfo=" + arc.nextFinalOutput);
-            // }
+            //if (DEBUG) System.out.println("    index: follow label=" + toHex(target.bytes[target.offset + targetUpto]&0xff) + " arc.output=" + arc.output + " arc.nfo=" + arc.nextFinalOutput);
             targetUpto++;
 
             if (arc.isFinal()) {
@@ -1736,9 +1705,7 @@
         if (!currentFrame.hasTerms) {
           termExists = false;
           term.length = targetUpto;
-          // if (DEBUG) {
-          //   System.out.println("  FAST NOT_FOUND term=" + brToString(term));
-          // }
+          //if (DEBUG) System.out.println("  FAST NOT_FOUND term=" + brToString(term));
           return false;
         }
 
@@ -1746,15 +1713,10 @@
 
         final SeekStatus result = currentFrame.scanToTerm(target, true);            
         if (result == SeekStatus.FOUND) {
-          // if (DEBUG) {
-          //   System.out.println("  return FOUND term=" + term.utf8ToString() + " " + term);
-          // }
+          //if (DEBUG) System.out.println("  return FOUND term=" + term.utf8ToString() + " " + term);
           return true;
         } else {
-          // if (DEBUG) {
-          //   System.out.println("  got result " + result + "; return NOT_FOUND term=" + term.utf8ToString());
-          // }
-
+          //if (DEBUG) System.out.println("  got result " + result + "; return NOT_FOUND term=" + term.utf8ToString());
           return false;
         }
       }
@@ -1791,9 +1753,7 @@
           // seeks to foobaz, we can re-use the seek state
           // for the first 5 bytes.
 
-          //if (DEBUG) {
-          //System.out.println("  re-use current seek state validIndexPrefix=" + validIndexPrefix);
-          //}
+          //if (DEBUG) System.out.println("  re-use current seek state validIndexPrefix=" + validIndexPrefix);
 
           arc = arcs[0];
           assert arc.isFinal();
@@ -1813,9 +1773,7 @@
           // First compare up to valid seek frames:
           while (targetUpto < targetLimit) {
             cmp = (term.bytes[targetUpto]&0xFF) - (target.bytes[target.offset + targetUpto]&0xFF);
-            //if (DEBUG) {
-            //System.out.println("    cycle targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")"   + " arc.output=" + arc.output + " output=" + output);
-            //}
+            //if (DEBUG) System.out.println("    cycle targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")"   + " arc.output=" + arc.output + " output=" + output);
             if (cmp != 0) {
               break;
             }
@@ -1843,9 +1801,7 @@
             final int targetLimit2 = Math.min(target.length, term.length);
             while (targetUpto < targetLimit2) {
               cmp = (term.bytes[targetUpto]&0xFF) - (target.bytes[target.offset + targetUpto]&0xFF);
-              //if (DEBUG) {
-              //System.out.println("    cycle2 targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")");
-              //}
+              //if (DEBUG) System.out.println("    cycle2 targetUpto=" + targetUpto + " (vs limit=" + targetLimit + ") cmp=" + cmp + " (targetLabel=" + (char) (target.bytes[target.offset + targetUpto]) + " vs termLabel=" + (char) (term.bytes[targetUpto]) + ")");
               if (cmp != 0) {
                 break;
               }
@@ -1862,9 +1818,7 @@
             // Common case: target term is after current
             // term, ie, app is seeking multiple terms
             // in sorted order
-            //if (DEBUG) {
-            //System.out.println("  target is after current (shares prefixLen=" + targetUpto + "); clear frame.scanned ord=" + lastFrame.ord);
-            //}
+            //if (DEBUG) System.out.println("  target is after current (shares prefixLen=" + targetUpto + "); clear frame.scanned ord=" + lastFrame.ord);
             currentFrame = lastFrame;
 
           } else if (cmp > 0) {
@@ -1873,23 +1827,17 @@
             // keep the currentFrame but we must rewind it
             // (so we scan from the start)
             targetBeforeCurrentLength = 0;
-            //if (DEBUG) {
-            //System.out.println("  target is before current (shares prefixLen=" + targetUpto + "); rewind frame ord=" + lastFrame.ord);
-            //}
+            //if (DEBUG) System.out.println("  target is before current (shares prefixLen=" + targetUpto + "); rewind frame ord=" + lastFrame.ord);
             currentFrame = lastFrame;
             currentFrame.rewind();
           } else {
             // Target is exactly the same as current term
             assert term.length == target.length;
             if (termExists) {
-              //if (DEBUG) {
-              //System.out.println("  target is same as current; return FOUND");
-              //}
+              //if (DEBUG) System.out.println("  target is same as current; return FOUND");
               return SeekStatus.FOUND;
             } else {
-              //if (DEBUG) {
-              //System.out.println("  target is same as current but term doesn't exist");
-              //}
+              //if (DEBUG) System.out.println("  target is same as current but term doesn't exist");
             }
           }
 
@@ -1902,9 +1850,7 @@
           assert arc.isFinal();
           assert arc.output != null;
 
-          //if (DEBUG) {
-          //System.out.println("    no seek state; push root frame");
-          //}
+          //if (DEBUG) System.out.println("    no seek state; push root frame");
 
           output = arc.output;
 
@@ -1915,9 +1861,7 @@
           currentFrame = pushFrame(arc, fstOutputs.add(output, arc.nextFinalOutput), 0);
         }
 
-        //if (DEBUG) {
-        //System.out.println("  start index loop targetUpto=" + targetUpto + " output=" + output + " currentFrame.ord+1=" + currentFrame.ord + " targetBeforeCurrentLength=" + targetBeforeCurrentLength);
-        //}
+        //if (DEBUG) System.out.println("  start index loop targetUpto=" + targetUpto + " output=" + output + " currentFrame.ord+1=" + currentFrame.ord + " targetBeforeCurrentLength=" + targetBeforeCurrentLength);
 
         while (targetUpto < target.length) {
 
@@ -1928,9 +1872,7 @@
           if (nextArc == null) {
 
             // Index is exhausted
-            // if (DEBUG) {
-            //   System.out.println("    index: index exhausted label=" + ((char) targetLabel) + " " + toHex(targetLabel));
-            // }
+            //if (DEBUG) System.out.println("    index: index exhausted label=" + ((char) targetLabel) + " " + toHex(targetLabel));
             
             validIndexPrefix = currentFrame.prefix;
             //validIndexPrefix = targetUpto;
@@ -1945,20 +1887,14 @@
               termExists = false;
 
               if (next() != null) {
-                //if (DEBUG) {
-                //System.out.println("  return NOT_FOUND term=" + brToString(term) + " " + term);
-                //}
+                //if (DEBUG) System.out.println("  return NOT_FOUND term=" + brToString(term) + " " + term);
                 return SeekStatus.NOT_FOUND;
               } else {
-                //if (DEBUG) {
-                //System.out.println("  return END");
-                //}
+                //if (DEBUG) System.out.println("  return END");
                 return SeekStatus.END;
               }
             } else {
-              //if (DEBUG) {
-              //System.out.println("  return " + result + " term=" + brToString(term) + " " + term);
-              //}
+              //if (DEBUG) System.out.println("  return " + result + " term=" + brToString(term) + " " + term);
               return result;
             }
           } else {
@@ -1971,9 +1907,7 @@
               output = fstOutputs.add(output, arc.output);
             }
 
-            //if (DEBUG) {
-            //System.out.println("    index: follow label=" + toHex(target.bytes[target.offset + targetUpto]&0xff) + " arc.output=" + arc.output + " arc.nfo=" + arc.nextFinalOutput);
-            //}
+            //if (DEBUG) System.out.println("    index: follow label=" + toHex(target.bytes[target.offset + targetUpto]&0xff) + " arc.output=" + arc.output + " arc.nfo=" + arc.nextFinalOutput);
             targetUpto++;
 
             if (arc.isFinal()) {
@@ -1997,14 +1931,10 @@
           term.copyBytes(target);
           termExists = false;
           if (next() != null) {
-            //if (DEBUG) {
-            //System.out.println("  return NOT_FOUND term=" + term.utf8ToString() + " " + term);
-            //}
+            //if (DEBUG) System.out.println("  return NOT_FOUND term=" + term.utf8ToString() + " " + term);
             return SeekStatus.NOT_FOUND;
           } else {
-            //if (DEBUG) {
-            //System.out.println("  return END");
-            //}
+            //if (DEBUG) System.out.println("  return END");
             return SeekStatus.END;
           }
         } else {
@@ -2129,9 +2059,7 @@
             // Note that the seek state (last seek) has been
             // invalidated beyond this depth
             validIndexPrefix = Math.min(validIndexPrefix, currentFrame.prefix);
-            //if (DEBUG) {
-            //System.out.println("  reset validIndexPrefix=" + validIndexPrefix);
-            //}
+            //if (DEBUG) System.out.println("  reset validIndexPrefix=" + validIndexPrefix);
           }
         }
 
@@ -2178,13 +2106,9 @@
       @Override
       public DocsEnum docs(Bits skipDocs, DocsEnum reuse, int flags) throws IOException {
         assert !eof;
-        //if (DEBUG) {
-        //System.out.println("BTTR.docs seg=" + segment);
-        //}
+        //if (DEBUG) System.out.println("BTTR.docs seg=" + segment);
         currentFrame.decodeMetaData();
-        //if (DEBUG) {
-        //System.out.println("  state=" + currentFrame.state);
-        //}
+        //if (DEBUG) System.out.println("  state=" + currentFrame.state);
         return postingsReader.docs(fieldInfo, currentFrame.state, skipDocs, reuse, flags);
       }
 
@@ -2202,9 +2126,7 @@
 
       @Override
       public void seekExact(BytesRef target, TermState otherState) {
-        // if (DEBUG) {
-        //   System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState);
-        // }
+        //if (DEBUG) System.out.println("BTTR.seekExact termState seg=" + segment + " target=" + target.utf8ToString() + " " + target + " state=" + otherState);
         assert clearEOF();
         if (target.compareTo(term) != 0 || !termExists) {
           assert otherState != null && otherState instanceof BlockTermState;
@@ -2215,9 +2137,7 @@
           assert currentFrame.metaDataUpto > 0;
           validIndexPrefix = 0;
         } else {
-          // if (DEBUG) {
-          //   System.out.println("  skip seek: already on target state=" + currentFrame.state);
-          // }
+          //if (DEBUG) System.out.println("  skip seek: already on target state=" + currentFrame.state);
         }
       }
       
@@ -2299,8 +2219,9 @@
 
         public Frame(int ord) throws IOException {
           this.ord = ord;
-          state = postingsReader.newTermState();
-          state.totalTermFreq = -1;
+          this.state = new BlockTermState();
+          this.state.totalTermFreq = -1;
+          this.state.meta = postingsReader.newMetaData(fieldInfo);
         }
 
         public void setFloorData(ByteArrayDataInput in, BytesRef source) {
@@ -2312,9 +2233,7 @@
           floorDataReader.reset(floorData, 0, numBytes);
           numFollowFloorBlocks = floorDataReader.readVInt();
           nextFloorLabel = floorDataReader.readByte() & 0xff;
-          //if (DEBUG) {
-          //System.out.println("    setFloorData fpOrig=" + fpOrig + " bytes=" + new BytesRef(source.bytes, source.offset + in.getPosition(), numBytes) + " numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + toHex(nextFloorLabel));
-          //}
+          //if (DEBUG) System.out.println("    setFloorData fpOrig=" + fpOrig + " bytes=" + new BytesRef(source.bytes, source.offset + in.getPosition(), numBytes) + " numFollowFloorBlocks=" + numFollowFloorBlocks + " nextFloorLabel=" + toHex(nextFloorLabel));
         }
 
         public int getTermBlockOrd() {
@@ -2322,9 +2241,7 @@
         }
 
         void loadNextFloorBlock() throws IOException {
-          //if (DEBUG) {
-          //System.out.println("    loadNextFloorBlock fp=" + fp + " fpEnd=" + fpEnd);
-          //}
+          //if (DEBUG) System.out.println("    loadNextFloorBlock fp=" + fp + " fpEnd=" + fpEnd);
           assert arc == null || isFloor: "arc=" + arc + " isFloor=" + isFloor;
           fp = fpEnd;
           nextEnt = -1;
@@ -2403,9 +2320,7 @@
           // Sub-blocks of a single floor block are always
           // written one after another -- tail recurse:
           fpEnd = in.getFilePointer();
-          // if (DEBUG) {
-          //   System.out.println("      fpEnd=" + fpEnd);
-          // }
+          //if (DEBUG) System.out.println("      fpEnd=" + fpEnd);
         }
 
         void rewind() {
@@ -2421,7 +2336,7 @@
           }
 
           /*
-          //System.out.println("rewind");
+          System.out.println("rewind");
           // Keeps the block loaded, but rewinds its state:
           if (nextEnt > 0 || fp != fpOrig) {
             if (DEBUG) {
@@ -2499,9 +2414,7 @@
             termExists = false;
             subCode = suffixesReader.readVLong();
             lastSubFP = fp - subCode;
-            //if (DEBUG) {
-            //System.out.println("    lastSubFP=" + lastSubFP);
-            //}
+            //if (DEBUG) System.out.println("    lastSubFP=" + lastSubFP);
             return true;
           }
         }
@@ -2512,22 +2425,16 @@
         public void scanToFloorFrame(BytesRef target) {
 
           if (!isFloor || target.length <= prefix) {
-            // if (DEBUG) {
-            //   System.out.println("    scanToFloorFrame skip: isFloor=" + isFloor + " target.length=" + target.length + " vs prefix=" + prefix);
-            // }
+            //if (DEBUG) System.out.println("    scanToFloorFrame skip: isFloor=" + isFloor + " target.length=" + target.length + " vs prefix=" + prefix);
             return;
           }
 
           final int targetLabel = target.bytes[target.offset + prefix] & 0xFF;
 
-          // if (DEBUG) {
-          //   System.out.println("    scanToFloorFrame fpOrig=" + fpOrig + " targetLabel=" + toHex(targetLabel) + " vs nextFloorLabel=" + toHex(nextFloorLabel) + " numFollowFloorBlocks=" + numFollowFloorBlocks);
-          // }
+          //if (DEBUG) System.out.println("    scanToFloorFrame fpOrig=" + fpOrig + " targetLabel=" + toHex(targetLabel) + " vs nextFloorLabel=" + toHex(nextFloorLabel) + " numFollowFloorBlocks=" + numFollowFloorBlocks);
 
           if (targetLabel < nextFloorLabel) {
-            // if (DEBUG) {
-            //   System.out.println("      already on correct block");
-            // }
+            //if (DEBUG) System.out.println("      already on correct block");
             return;
           }
 
@@ -2538,25 +2445,19 @@
             final long code = floorDataReader.readVLong();
             newFP = fpOrig + (code >>> 1);
             hasTerms = (code & 1) != 0;
-            // if (DEBUG) {
-            //   System.out.println("      label=" + toHex(nextFloorLabel) + " fp=" + newFP + " hasTerms?=" + hasTerms + " numFollowFloor=" + numFollowFloorBlocks);
-            // }
+            //if (DEBUG) System.out.println("      label=" + toHex(nextFloorLabel) + " fp=" + newFP + " hasTerms?=" + hasTerms + " numFollowFloor=" + numFollowFloorBlocks);
             
             isLastInFloor = numFollowFloorBlocks == 1;
             numFollowFloorBlocks--;
 
             if (isLastInFloor) {
               nextFloorLabel = 256;
-              // if (DEBUG) {
-              //   System.out.println("        stop!  last block nextFloorLabel=" + toHex(nextFloorLabel));
-              // }
+              //if (DEBUG) System.out.println("        stop!  last block nextFloorLabel=" + toHex(nextFloorLabel));
               break;
             } else {
               nextFloorLabel = floorDataReader.readByte() & 0xff;
               if (targetLabel < nextFloorLabel) {
-                // if (DEBUG) {
-                //   System.out.println("        stop!  nextFloorLabel=" + toHex(nextFloorLabel));
-                // }
+                //if (DEBUG) System.out.println("        stop!  nextFloorLabel=" + toHex(nextFloorLabel));
                 break;
               }
             }
@@ -2564,15 +2465,11 @@
 
           if (newFP != fp) {
             // Force re-load of the block:
-            // if (DEBUG) {
-            //   System.out.println("      force switch to fp=" + newFP + " oldFP=" + fp);
-            // }
+            //if (DEBUG) System.out.println("      force switch to fp=" + newFP + " oldFP=" + fp);
             nextEnt = -1;
             fp = newFP;
           } else {
-            // if (DEBUG) {
-            //   System.out.println("      stay on same fp=" + newFP);
-            // }
+            //if (DEBUG) System.out.println("      stay on same fp=" + newFP);
           }
         }
     
@@ -2670,7 +2567,7 @@
         // scan the entries check if the suffix matches.
         public SeekStatus scanToTermLeaf(BytesRef target, boolean exactOnly) throws IOException {
 
-          // if (DEBUG) System.out.println("    scanToTermLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term));
+          //if (DEBUG) System.out.println("    scanToTermLeaf: block fp=" + fp + " prefix=" + prefix + " nextEnt=" + nextEnt + " (of " + entCount + ") target=" + brToString(target) + " term=" + brToString(term));
 
           assert nextEnt != -1;
 
@@ -2693,13 +2590,13 @@
 
             suffix = suffixesReader.readVInt();
 
-            // if (DEBUG) {
+            //if (DEBUG) {
             //   BytesRef suffixBytesRef = new BytesRef();
             //   suffixBytesRef.bytes = suffixBytes;
             //   suffixBytesRef.offset = suffixesReader.getPosition();
             //   suffixBytesRef.length = suffix;
-            //   System.out.println("      cycle: term " + (nextEnt-1) + " (of " + entCount + ") suffix=" + brToString(suffixBytesRef));
-            // }
+            //System.out.println("      cycle: term " + (nextEnt-1) + " (of " + entCount + ") suffix=" + brToString(suffixBytesRef));
+            //}
 
             final int termLen = prefix + suffix;
             startBytePos = suffixesReader.getPosition();
@@ -2817,13 +2714,13 @@
 
             final int code = suffixesReader.readVInt();
             suffix = code >>> 1;
-            // if (DEBUG) {
+            //if (DEBUG) {
             //   BytesRef suffixBytesRef = new BytesRef();
             //   suffixBytesRef.bytes = suffixBytes;
             //   suffixBytesRef.offset = suffixesReader.getPosition();
             //   suffixBytesRef.length = suffix;
-            //   System.out.println("      cycle: " + ((code&1)==1 ? "sub-block" : "term") + " " + (nextEnt-1) + " (of " + entCount + ") suffix=" + brToString(suffixBytesRef));
-            // }
+            //System.out.println("      cycle: " + ((code&1)==1 ? "sub-block" : "term") + " " + (nextEnt-1) + " (of " + entCount + ") suffix=" + brToString(suffixBytesRef));
+            //}
 
             termExists = (code & 1) == 0;
             final int termLen = prefix + suffix;
Index: lucene/core/src/java/org/apache/lucene/codecs/PostingsReaderBase.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/PostingsReaderBase.java	(revision 1492635)
+++ lucene/core/src/java/org/apache/lucene/codecs/PostingsReaderBase.java	(working copy)
@@ -52,7 +52,7 @@
   public abstract void init(IndexInput termsIn) throws IOException;
 
   /** Return a newly created empty TermState */
-  public abstract BlockTermState newTermState() throws IOException;
+  public abstract TermMetaData newMetaData(FieldInfo info) throws IOException;
 
   /** Actually decode metadata for next term */
   public abstract void nextTerm(FieldInfo fieldInfo, BlockTermState state) throws IOException;
Index: lucene/core/src/java/org/apache/lucene/codecs/TermMetaData.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/TermMetaData.java	(revision 0)
+++ lucene/core/src/java/org/apache/lucene/codecs/TermMetaData.java	(working copy)
@@ -0,0 +1,99 @@
+package org.apache.lucene.codecs;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.store.DataInput;
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongsRef;
+
+public class TermMetaData implements Cloneable {
+  // It consists of two parts:
+  //
+  // The base part: 
+  //   long array, in which all values increase
+  //   monotonically together from term to term.
+  //
+  // The extend part:
+  //   byte array, in which non-monotonic values
+  //   are stored/encoded.
+  //
+  // NOTE: For raw output, 
+  // it is always assumed that, when we have
+  //   this.base[i] < another.base[i],
+  // for all j in [base.offset, base.end)
+  //   this.base[j] <= another.base[j]
+  // with this property, we might have better compression 
+  // for base part.
+  //
+  // However, this property is not guaranteed for all intermediate
+  // outputs in a FST, e.g. a TermMetaData shared by two arcs might
+  // get a 'skewed' output, which cannot be compared with the others.
+  // Therefore during building phase, we have to iterate each long value 
+  // to see whether the 'comparable' property still holds.
+  //
+  // NOTE: only the non-negative part of each long is used, i.e. 63 bits
+  //
+  protected LongsRef base;
+  protected BytesRef extend;
+
+  protected ByteBuffer buffer;
+
+  public TermMetaData() {
+    this.base = null;
+    this.extend = null;
+    this.buffer = null;
+  }
+
+  public TermMetaData(LongsRef l, BytesRef b) {
+    this.base = l;
+    this.extend = b;
+    this.buffer = ByteBuffer.wrap(extend.bytes, extend.offset, extend.length);
+  }
+  public TermMetaData(int baseLength, int extendLength) {
+    if (baseLength > 0) {
+      this.base = new LongsRef(new long[baseLength], 0, baseLength);
+    } else {
+      this.base = null;
+    }
+    if (extendLength > 0) {
+      this.extend = new BytesRef(new byte[extendLength]);
+      this.buffer = ByteBuffer.wrap(extend.bytes, extend.offset, extend.length);
+    } else {
+      this.extend = null;
+    }
+  }
+
+  public TermMetaData clone() {
+    try {
+      return (TermMetaData)super.clone();
+    } catch (CloneNotSupportedException cnse) {
+      // should not happen
+      throw new RuntimeException(cnse);
+    }
+  } 
+  
+  public void copyFrom(TermMetaData other) { // nocommit: no deepcopy!
+    if (other.base != null) {
+      this.base = LongsRef.deepCopyOf(other.base);
+    }
+    if (other.extend != null) {
+      this.extend = BytesRef.deepCopyOf(other.extend);
+      this.buffer = ByteBuffer.wrap(extend.bytes, extend.offset, extend.length);
+    }
+  }
+
+  public String toString() {
+    return "TermMetaData";
+  }
+  
+  public void write(DataOutput out, TermState state) throws IOException {
+    throw new IllegalStateException("not implemented");
+  }
+  public void read(DataInput in, TermState state) throws IOException {
+    throw new IllegalStateException("not implemented");
+  }
+
+}
Index: lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40MetaData.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40MetaData.java	(revision 0)
+++ lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40MetaData.java	(working copy)
@@ -0,0 +1,58 @@
+package org.apache.lucene.codecs.lucene40;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.lucene.codecs.BlockTermState;
+import org.apache.lucene.codecs.TermMetaData;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.util.IOUtils;
+
+final class Lucene40MetaData extends TermMetaData {
+  // NOTE: Only used for reader side
+  // NOTE: Only used by the "primary" MetaData -- clones don't
+  // copy this (basically they are "transient"):
+  ByteArrayDataInput bytesReader;  // TODO: should this NOT be in the TermState...?
+  byte[] bytes;
+
+  public Lucene40MetaData(long freqOffset, long skipOffset, long proxOffset) {
+    //super(proxOffset != -1 ? 2 : 1, 8);
+    super(2, 8);
+    setFreqOffset(freqOffset);
+    setProxOffset(proxOffset);
+    setSkipOffset(skipOffset);
+  }
+  public Lucene40MetaData(FieldInfo info) {
+    super(2, 8);
+  }
+  public Lucene40MetaData() {
+  }
+
+  public void setFreqOffset(long freqOffset) {
+    base.longs[0] = freqOffset;
+  }
+  public void setProxOffset(long proxOffset) {
+    base.longs[1] = proxOffset;
+  }
+  public void setSkipOffset(long skipOffset) {
+    buffer.putLong(0, skipOffset);
+  }
+  public long freqOffset() {
+    return base.longs[0];
+  }
+  public long proxOffset() {
+    return base.longs[1];
+  }
+  public long skipOffset() {
+    return buffer.getLong(0);
+  }
+
+  @Override
+  public Lucene40MetaData clone() {
+    Lucene40MetaData meta = new Lucene40MetaData();
+    meta.copyFrom(this);
+    return meta;
+  }
+}
Index: lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsReader.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsReader.java	(revision 1492635)
+++ lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsReader.java	(working copy)
@@ -23,6 +23,7 @@
 import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.PostingsReaderBase;
+import org.apache.lucene.codecs.TermMetaData;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
@@ -115,47 +116,9 @@
     skipMinimum = termsIn.readInt();
   }
 
-  // Must keep final because we do non-standard clone
-  private final static class StandardTermState extends BlockTermState {
-    long freqOffset;
-    long proxOffset;
-    long skipOffset;
-
-    // Only used by the "primary" TermState -- clones don't
-    // copy this (basically they are "transient"):
-    ByteArrayDataInput bytesReader;  // TODO: should this NOT be in the TermState...?
-    byte[] bytes;
-
-    @Override
-    public StandardTermState clone() {
-      StandardTermState other = new StandardTermState();
-      other.copyFrom(this);
-      return other;
-    }
-
-    @Override
-    public void copyFrom(TermState _other) {
-      super.copyFrom(_other);
-      StandardTermState other = (StandardTermState) _other;
-      freqOffset = other.freqOffset;
-      proxOffset = other.proxOffset;
-      skipOffset = other.skipOffset;
-
-      // Do not copy bytes, bytesReader (else TermState is
-      // very heavy, ie drags around the entire block's
-      // byte[]).  On seek back, if next() is in fact used
-      // (rare!), they will be re-read from disk.
-    }
-
-    @Override
-    public String toString() {
-      return super.toString() + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " skipOffset=" + skipOffset;
-    }
-  }
-
   @Override
-  public BlockTermState newTermState() {
-    return new StandardTermState();
+  public TermMetaData newMetaData(FieldInfo info) {
+    return new Lucene40MetaData(info);
   }
 
   @Override
@@ -174,68 +137,68 @@
   /* Reads but does not decode the byte[] blob holding
      metadata for the current terms block */
   @Override
-  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState _termState) throws IOException {
-    final StandardTermState termState = (StandardTermState) _termState;
+  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState state) throws IOException {
+    final Lucene40MetaData meta = (Lucene40MetaData) state.meta;
 
     final int len = termsIn.readVInt();
 
-    // if (DEBUG) System.out.println("  SPR.readTermsBlock bytes=" + len + " ts=" + _termState);
-    if (termState.bytes == null) {
-      termState.bytes = new byte[ArrayUtil.oversize(len, 1)];
-      termState.bytesReader = new ByteArrayDataInput();
-    } else if (termState.bytes.length < len) {
-      termState.bytes = new byte[ArrayUtil.oversize(len, 1)];
+    // if (DEBUG) System.out.println("  SPR.readTermsBlock bytes=" + len + " ts=" + state);
+    if (meta.bytes == null) {
+      meta.bytes = new byte[ArrayUtil.oversize(len, 1)];
+      meta.bytesReader = new ByteArrayDataInput();
+    } else if (meta.bytes.length < len) {
+      meta.bytes = new byte[ArrayUtil.oversize(len, 1)];
     }
 
-    termsIn.readBytes(termState.bytes, 0, len);
-    termState.bytesReader.reset(termState.bytes, 0, len);
+    termsIn.readBytes(meta.bytes, 0, len);
+    meta.bytesReader.reset(meta.bytes, 0, len);
   }
 
   @Override
-  public void nextTerm(FieldInfo fieldInfo, BlockTermState _termState)
+  public void nextTerm(FieldInfo fieldInfo, BlockTermState state)
     throws IOException {
-    final StandardTermState termState = (StandardTermState) _termState;
-    // if (DEBUG) System.out.println("SPR: nextTerm seg=" + segment + " tbOrd=" + termState.termBlockOrd + " bytesReader.fp=" + termState.bytesReader.getPosition());
-    final boolean isFirstTerm = termState.termBlockOrd == 0;
+    final Lucene40MetaData meta = (Lucene40MetaData) state.meta;
+    // if (DEBUG) System.out.println("SPR: nextTerm seg=" + segment + " tbOrd=" + state.termBlockOrd + " bytesReader.fp=" + meta.bytesReader.getPosition());
+    final boolean isFirstTerm = state.termBlockOrd == 0;
 
     if (isFirstTerm) {
-      termState.freqOffset = termState.bytesReader.readVLong();
+      meta.setFreqOffset(meta.bytesReader.readVLong());
     } else {
-      termState.freqOffset += termState.bytesReader.readVLong();
+      meta.setFreqOffset(meta.freqOffset() + meta.bytesReader.readVLong());
     }
     /*
     if (DEBUG) {
-      System.out.println("  dF=" + termState.docFreq);
-      System.out.println("  freqFP=" + termState.freqOffset);
+      System.out.println("  dF=" + state.docFreq);
+      System.out.println("  freqFP=" + meta.freqOffset);
     }
     */
-    assert termState.freqOffset < freqIn.length();
+    assert meta.freqOffset() < freqIn.length();
 
-    if (termState.docFreq >= skipMinimum) {
-      termState.skipOffset = termState.bytesReader.readVLong();
-      // if (DEBUG) System.out.println("  skipOffset=" + termState.skipOffset + " vs freqIn.length=" + freqIn.length());
-      assert termState.freqOffset + termState.skipOffset < freqIn.length();
+    if (state.docFreq >= skipMinimum) {
+      meta.setSkipOffset(meta.bytesReader.readVLong());
+      // if (DEBUG) System.out.println("  skipOffset=" + meta.skipOffset() + " vs freqIn.length=" + freqIn.length());
+      assert meta.freqOffset() + meta.skipOffset() < freqIn.length();
     } else {
       // undefined
     }
 
     if (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) {
       if (isFirstTerm) {
-        termState.proxOffset = termState.bytesReader.readVLong();
+        meta.setProxOffset(meta.bytesReader.readVLong());
       } else {
-        termState.proxOffset += termState.bytesReader.readVLong();
+        meta.setProxOffset(meta.proxOffset() + meta.bytesReader.readVLong());
       }
-      // if (DEBUG) System.out.println("  proxFP=" + termState.proxOffset);
+      // if (DEBUG) System.out.println("  proxFP=" + meta.proxOffset());
     }
   }
     
   @Override
-  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
+  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
     if (canReuse(reuse, liveDocs)) {
-      // if (DEBUG) System.out.println("SPR.docs ts=" + termState);
-      return ((SegmentDocsEnumBase) reuse).reset(fieldInfo, (StandardTermState)termState);
+      // if (DEBUG) System.out.println("SPR.docs ts=" + state);
+      return ((SegmentDocsEnumBase) reuse).reset(fieldInfo, state);
     }
-    return newDocsEnum(liveDocs, fieldInfo, (StandardTermState)termState);
+    return newDocsEnum(liveDocs, fieldInfo, state);
   }
   
   private boolean canReuse(DocsEnum reuse, Bits liveDocs) {
@@ -252,16 +215,16 @@
     return false;
   }
   
-  private DocsEnum newDocsEnum(Bits liveDocs, FieldInfo fieldInfo, StandardTermState termState) throws IOException {
+  private DocsEnum newDocsEnum(Bits liveDocs, FieldInfo fieldInfo, BlockTermState state) throws IOException {
     if (liveDocs == null) {
-      return new AllDocsSegmentDocsEnum(freqIn).reset(fieldInfo, termState);
+      return new AllDocsSegmentDocsEnum(freqIn).reset(fieldInfo, state);
     } else {
-      return new LiveDocsSegmentDocsEnum(freqIn, liveDocs).reset(fieldInfo, termState);
+      return new LiveDocsSegmentDocsEnum(freqIn, liveDocs).reset(fieldInfo, state);
     }
   }
 
   @Override
-  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs,
+  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs,
                                                DocsAndPositionsEnum reuse, int flags)
     throws IOException {
 
@@ -284,7 +247,7 @@
           docsEnum = new SegmentFullPositionsEnum(freqIn, proxIn);
         }
       }
-      return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
+      return docsEnum.reset(fieldInfo, state, liveDocs);
     } else {
       SegmentDocsAndPositionsEnum docsEnum;
       if (reuse == null || !(reuse instanceof SegmentDocsAndPositionsEnum)) {
@@ -298,7 +261,7 @@
           docsEnum = new SegmentDocsAndPositionsEnum(freqIn, proxIn);
         }
       }
-      return docsEnum.reset(fieldInfo, (StandardTermState) termState, liveDocs);
+      return docsEnum.reset(fieldInfo, state, liveDocs);
     }
   }
 
@@ -342,18 +305,19 @@
     }
     
     
-    DocsEnum reset(FieldInfo fieldInfo, StandardTermState termState) throws IOException {
+    DocsEnum reset(FieldInfo fieldInfo, BlockTermState state) throws IOException {
+      final Lucene40MetaData meta = (Lucene40MetaData) state.meta;
       indexOmitsTF = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY;
       storePayloads = fieldInfo.hasPayloads();
       storeOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
-      freqOffset = termState.freqOffset;
-      skipOffset = termState.skipOffset;
+      freqOffset = meta.freqOffset();
+      skipOffset = meta.skipOffset();
 
       // TODO: for full enum case (eg segment merging) this
       // seek is unnecessary; maybe we can avoid in such
       // cases
-      freqIn.seek(termState.freqOffset);
-      limit = termState.docFreq;
+      freqIn.seek(freqOffset);
+      limit = state.docFreq;
       assert limit > 0;
       ord = 0;
       doc = -1;
@@ -725,7 +689,8 @@
       this.proxIn = proxIn.clone();
     }
 
-    public SegmentDocsAndPositionsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
+    public SegmentDocsAndPositionsEnum reset(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs) throws IOException {
+      final Lucene40MetaData meta = (Lucene40MetaData) state.meta;
       assert fieldInfo.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
       assert !fieldInfo.hasPayloads();
 
@@ -734,10 +699,10 @@
       // TODO: for full enum case (eg segment merging) this
       // seek is unnecessary; maybe we can avoid in such
       // cases
-      freqIn.seek(termState.freqOffset);
-      lazyProxPointer = termState.proxOffset;
+      freqIn.seek(meta.freqOffset());
+      lazyProxPointer = meta.proxOffset();
 
-      limit = termState.docFreq;
+      limit = state.docFreq;
       assert limit > 0;
 
       ord = 0;
@@ -748,9 +713,9 @@
       skipped = false;
       posPendingCount = 0;
 
-      freqOffset = termState.freqOffset;
-      proxOffset = termState.proxOffset;
-      skipOffset = termState.skipOffset;
+      freqOffset = meta.freqOffset();
+      proxOffset = meta.proxOffset();
+      skipOffset = meta.skipOffset();
       // if (DEBUG) System.out.println("StandardR.D&PE reset seg=" + segment + " limit=" + limit + " freqFP=" + freqOffset + " proxFP=" + proxOffset);
 
       return this;
@@ -938,7 +903,8 @@
       this.proxIn = proxIn.clone();
     }
 
-    public SegmentFullPositionsEnum reset(FieldInfo fieldInfo, StandardTermState termState, Bits liveDocs) throws IOException {
+    public SegmentFullPositionsEnum reset(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs) throws IOException {
+      final Lucene40MetaData meta = (Lucene40MetaData) state.meta;
       storeOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
       storePayloads = fieldInfo.hasPayloads();
       assert fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
@@ -953,10 +919,10 @@
       // TODO: for full enum case (eg segment merging) this
       // seek is unnecessary; maybe we can avoid in such
       // cases
-      freqIn.seek(termState.freqOffset);
-      lazyProxPointer = termState.proxOffset;
+      freqIn.seek(meta.freqOffset());
+      lazyProxPointer = meta.proxOffset();
 
-      limit = termState.docFreq;
+      limit = state.docFreq;
       ord = 0;
       doc = -1;
       accum = 0;
@@ -967,9 +933,9 @@
       posPendingCount = 0;
       payloadPending = false;
 
-      freqOffset = termState.freqOffset;
-      proxOffset = termState.proxOffset;
-      skipOffset = termState.skipOffset;
+      freqOffset = meta.freqOffset();
+      proxOffset = meta.proxOffset();
+      skipOffset = meta.skipOffset();
       //System.out.println("StandardR.D&PE reset seg=" + segment + " limit=" + limit + " freqFP=" + freqOffset + " proxFP=" + proxOffset + " this=" + this);
 
       return this;
Index: lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41MetaData.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41MetaData.java	(revision 0)
+++ lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41MetaData.java	(working copy)
@@ -0,0 +1,99 @@
+package org.apache.lucene.codecs.lucene41;
+
+import static org.apache.lucene.codecs.lucene41.Lucene41PostingsFormat.BLOCK_SIZE;
+import static org.apache.lucene.codecs.lucene41.ForUtil.MAX_DATA_SIZE;
+import static org.apache.lucene.codecs.lucene41.ForUtil.MAX_ENCODED_SIZE;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.nio.ByteBuffer;
+
+import org.apache.lucene.codecs.BlockTermState;
+import org.apache.lucene.codecs.TermMetaData;
+import org.apache.lucene.index.TermState;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LongsRef;
+
+final class Lucene41MetaData extends TermMetaData {
+  // NOTE: Only used for reader side
+  // NOTE: Only used by the "primary" MetaData -- clones don't
+  // copy this (basically they are "transient"):
+  ByteArrayDataInput bytesReader;
+  byte[] bytes;
+
+  public Lucene41MetaData(long docStartFP, long posStartFP, long payStartFP, long skipOffset, long lastPosBlockOffset, int singletonDocID) {
+    // nocommit: temporarily omit variable length
+    //super(posStartFP != -1 ? 3 : 1, posStartFP != -1 ? 20 : 4);
+    super(3, 20);
+    setDocFP(docStartFP);
+    setSingletonDocID(singletonDocID);
+    //if (posStartFP != -1) {
+      setPosFP(posStartFP);
+      setPayFP(payStartFP);
+      setSkipOffset(skipOffset);
+      setLastPosBlockOffset(lastPosBlockOffset);
+    //}
+  }
+  public Lucene41MetaData() {
+  }
+  public Lucene41MetaData(FieldInfo info) {
+    this(info.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0);
+  }
+  public Lucene41MetaData(boolean hasPositions) {
+    //super(hasPositions ? 3 : 1, hasPositions ? 20 : 4);
+    super(3, 20);
+  }
+
+  public void setSingletonDocID(int singletonDocID) {
+    buffer.putInt(0, singletonDocID);
+  }
+  public void setSkipOffset(long skipOffset) {
+    buffer.putLong(4, skipOffset);
+  }
+  public void setLastPosBlockOffset(long lastPosBlockOffset) {
+    buffer.putLong(12, lastPosBlockOffset);
+  }
+  public void setDocFP(long docFP) {
+    base.longs[0] = docFP;
+  }
+  public void setPosFP(long posFP) {
+    base.longs[1] = posFP;
+  }
+  public void setPayFP(long payFP) {
+    base.longs[2] = payFP;
+  }
+
+  public int singletonDocID() {
+    return buffer.getInt(0);
+  }
+  public long skipOffset() {
+    return buffer.getLong(4);
+  }
+  public long lastPosBlockOffset() {
+    return buffer.getLong(12);
+  }
+  public long docFP() {
+    return base.longs[0];
+  }
+  public long posFP() {
+    return base.longs[1];
+  }
+  public long payFP() {
+    return base.longs[2];
+  }
+  @Override
+  public Lucene41MetaData clone() {
+    Lucene41MetaData meta = (Lucene41MetaData)super.clone();
+    meta.copyFrom(this);
+    return meta;
+  }
+
+  @Override
+  public String toString() {
+    return "docStartFP=" + docFP() + " posStartFP=" + posFP() + " payStartFP=" + payFP() + " lastPosBlockOffset=" + lastPosBlockOffset() + " singletonDocID=" + singletonDocID();
+  }
+}
Index: lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41PostingsReader.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41PostingsReader.java	(revision 1492635)
+++ lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41PostingsReader.java	(working copy)
@@ -27,14 +27,15 @@
 import org.apache.lucene.codecs.BlockTermState;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.PostingsReaderBase;
+import org.apache.lucene.codecs.TermMetaData;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.TermState;
 import org.apache.lucene.index.SegmentInfo;
-import org.apache.lucene.index.TermState;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.DataInput;
 import org.apache.lucene.store.Directory;
@@ -141,55 +142,9 @@
     }
   }
 
-  // Must keep final because we do non-standard clone
-  private final static class IntBlockTermState extends BlockTermState {
-    long docStartFP;
-    long posStartFP;
-    long payStartFP;
-    long skipOffset;
-    long lastPosBlockOffset;
-    // docid when there is a single pulsed posting, otherwise -1
-    // freq is always implicitly totalTermFreq in this case.
-    int singletonDocID;
-
-    // Only used by the "primary" TermState -- clones don't
-    // copy this (basically they are "transient"):
-    ByteArrayDataInput bytesReader;  // TODO: should this NOT be in the TermState...?
-    byte[] bytes;
-
-    @Override
-    public IntBlockTermState clone() {
-      IntBlockTermState other = new IntBlockTermState();
-      other.copyFrom(this);
-      return other;
-    }
-
-    @Override
-    public void copyFrom(TermState _other) {
-      super.copyFrom(_other);
-      IntBlockTermState other = (IntBlockTermState) _other;
-      docStartFP = other.docStartFP;
-      posStartFP = other.posStartFP;
-      payStartFP = other.payStartFP;
-      lastPosBlockOffset = other.lastPosBlockOffset;
-      skipOffset = other.skipOffset;
-      singletonDocID = other.singletonDocID;
-
-      // Do not copy bytes, bytesReader (else TermState is
-      // very heavy, ie drags around the entire block's
-      // byte[]).  On seek back, if next() is in fact used
-      // (rare!), they will be re-read from disk.
-    }
-
-    @Override
-    public String toString() {
-      return super.toString() + " docStartFP=" + docStartFP + " posStartFP=" + posStartFP + " payStartFP=" + payStartFP + " lastPosBlockOffset=" + lastPosBlockOffset + " singletonDocID=" + singletonDocID;
-    }
-  }
-
   @Override
-  public IntBlockTermState newTermState() {
-    return new IntBlockTermState();
+  public TermMetaData newMetaData(FieldInfo info) {
+    return new Lucene41MetaData(info);
   }
 
   @Override
@@ -200,87 +155,87 @@
   /* Reads but does not decode the byte[] blob holding
      metadata for the current terms block */
   @Override
-  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState _termState) throws IOException {
-    final IntBlockTermState termState = (IntBlockTermState) _termState;
+  public void readTermsBlock(IndexInput termsIn, FieldInfo fieldInfo, BlockTermState state) throws IOException {
+    final Lucene41MetaData meta = (Lucene41MetaData) state.meta;
 
     final int numBytes = termsIn.readVInt();
 
-    if (termState.bytes == null) {
-      termState.bytes = new byte[ArrayUtil.oversize(numBytes, 1)];
-      termState.bytesReader = new ByteArrayDataInput();
-    } else if (termState.bytes.length < numBytes) {
-      termState.bytes = new byte[ArrayUtil.oversize(numBytes, 1)];
+    if (meta.bytes == null) {
+      meta.bytes = new byte[ArrayUtil.oversize(numBytes, 1)];
+      meta.bytesReader = new ByteArrayDataInput();
+    } else if (meta.bytes.length < numBytes) {
+      meta.bytes = new byte[ArrayUtil.oversize(numBytes, 1)];
     }
 
-    termsIn.readBytes(termState.bytes, 0, numBytes);
-    termState.bytesReader.reset(termState.bytes, 0, numBytes);
+    termsIn.readBytes(meta.bytes, 0, numBytes);
+    meta.bytesReader.reset(meta.bytes, 0, numBytes);
   }
 
   @Override
-  public void nextTerm(FieldInfo fieldInfo, BlockTermState _termState)
+  public void nextTerm(FieldInfo fieldInfo, BlockTermState state)
     throws IOException {
-    final IntBlockTermState termState = (IntBlockTermState) _termState;
-    final boolean isFirstTerm = termState.termBlockOrd == 0;
+    final Lucene41MetaData meta = (Lucene41MetaData) state.meta;
+    final boolean isFirstTerm = state.termBlockOrd == 0;
     final boolean fieldHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0;
     final boolean fieldHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0;
     final boolean fieldHasPayloads = fieldInfo.hasPayloads();
 
-    final DataInput in = termState.bytesReader;
+    final DataInput in = meta.bytesReader;
     if (isFirstTerm) {
-      if (termState.docFreq == 1) {
-        termState.singletonDocID = in.readVInt();
-        termState.docStartFP = 0;
+      if (state.docFreq == 1) {
+        meta.setSingletonDocID(in.readVInt());
+        meta.setDocFP(0);
       } else {
-        termState.singletonDocID = -1;
-        termState.docStartFP = in.readVLong();
+        meta.setSingletonDocID(-1);
+        meta.setDocFP(in.readVLong());
       }
       if (fieldHasPositions) {
-        termState.posStartFP = in.readVLong();
-        if (termState.totalTermFreq > BLOCK_SIZE) {
-          termState.lastPosBlockOffset = in.readVLong();
+        meta.setPosFP(in.readVLong());
+        if (state.totalTermFreq > BLOCK_SIZE) {
+          meta.setLastPosBlockOffset(in.readVLong());
         } else {
-          termState.lastPosBlockOffset = -1;
+          meta.setLastPosBlockOffset(-1);
         }
-        if ((fieldHasPayloads || fieldHasOffsets) && termState.totalTermFreq >= BLOCK_SIZE) {
-          termState.payStartFP = in.readVLong();
+        if ((fieldHasPayloads || fieldHasOffsets) && state.totalTermFreq >= BLOCK_SIZE) {
+          meta.setPayFP(in.readVLong());
         } else {
-          termState.payStartFP = -1;
+          meta.setPayFP(-1);
         }
       }
     } else {
-      if (termState.docFreq == 1) {
-        termState.singletonDocID = in.readVInt();
+      if (state.docFreq == 1) {
+        meta.setSingletonDocID(in.readVInt());
       } else {
-        termState.singletonDocID = -1;
-        termState.docStartFP += in.readVLong();
+        meta.setSingletonDocID(-1);
+        meta.setDocFP(meta.docFP() + in.readVLong());
       }
       if (fieldHasPositions) {
-        termState.posStartFP += in.readVLong();
-        if (termState.totalTermFreq > BLOCK_SIZE) {
-          termState.lastPosBlockOffset = in.readVLong();
+        meta.setPosFP(meta.posFP() + in.readVLong());
+        if (state.totalTermFreq > BLOCK_SIZE) {
+          meta.setLastPosBlockOffset(in.readVLong());
         } else {
-          termState.lastPosBlockOffset = -1;
+          meta.setLastPosBlockOffset(-1);
         }
-        if ((fieldHasPayloads || fieldHasOffsets) && termState.totalTermFreq >= BLOCK_SIZE) {
+        if ((fieldHasPayloads || fieldHasOffsets) && state.totalTermFreq >= BLOCK_SIZE) {
           long delta = in.readVLong();
-          if (termState.payStartFP == -1) {
-            termState.payStartFP = delta;
+          if (meta.payFP() == -1) {
+            meta.setPayFP(delta);
           } else {
-            termState.payStartFP += delta;
+            meta.setPayFP(meta.payFP() + delta);
           }
         }
       }
     }
 
-    if (termState.docFreq > BLOCK_SIZE) {
-      termState.skipOffset = in.readVLong();
+    if (state.docFreq > BLOCK_SIZE) {
+      meta.setSkipOffset(in.readVLong());
     } else {
-      termState.skipOffset = -1;
+      meta.setSkipOffset(-1);
     }
   }
     
   @Override
-  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
+  public DocsEnum docs(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs, DocsEnum reuse, int flags) throws IOException {
     BlockDocsEnum docsEnum;
     if (reuse instanceof BlockDocsEnum) {
       docsEnum = (BlockDocsEnum) reuse;
@@ -290,13 +245,13 @@
     } else {
       docsEnum = new BlockDocsEnum(fieldInfo);
     }
-    return docsEnum.reset(liveDocs, (IntBlockTermState) termState, flags);
+    return docsEnum.reset(liveDocs, state, flags);
   }
 
   // TODO: specialize to liveDocs vs not
   
   @Override
-  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState termState, Bits liveDocs,
+  public DocsAndPositionsEnum docsAndPositions(FieldInfo fieldInfo, BlockTermState state, Bits liveDocs,
                                                DocsAndPositionsEnum reuse, int flags)
     throws IOException {
 
@@ -314,7 +269,7 @@
       } else {
         docsAndPositionsEnum = new BlockDocsAndPositionsEnum(fieldInfo);
       }
-      return docsAndPositionsEnum.reset(liveDocs, (IntBlockTermState) termState);
+      return docsAndPositionsEnum.reset(liveDocs, state);
     } else {
       EverythingEnum everythingEnum;
       if (reuse instanceof EverythingEnum) {
@@ -325,7 +280,7 @@
       } else {
         everythingEnum = new EverythingEnum(fieldInfo);
       }
-      return everythingEnum.reset(liveDocs, (IntBlockTermState) termState, flags);
+      return everythingEnum.reset(liveDocs, state, flags);
     }
   }
 
@@ -389,16 +344,17 @@
         indexHasPayloads == fieldInfo.hasPayloads();
     }
     
-    public DocsEnum reset(Bits liveDocs, IntBlockTermState termState, int flags) throws IOException {
+    public DocsEnum reset(Bits liveDocs, BlockTermState state, int flags) throws IOException {
+      final Lucene41MetaData meta = (Lucene41MetaData) state.meta;
       this.liveDocs = liveDocs;
       // if (DEBUG) {
-      //   System.out.println("  FPR.reset: termState=" + termState);
+      //   System.out.println("  FPR.reset: state=" + state);
       // }
-      docFreq = termState.docFreq;
-      totalTermFreq = indexHasFreq ? termState.totalTermFreq : docFreq;
-      docTermStartFP = termState.docStartFP;
-      skipOffset = termState.skipOffset;
-      singletonDocID = termState.singletonDocID;
+      docFreq = state.docFreq;
+      totalTermFreq = indexHasFreq ? state.totalTermFreq : docFreq;
+      docTermStartFP = meta.docFP();
+      skipOffset = meta.skipOffset();
+      singletonDocID = meta.singletonDocID();
       if (docFreq > 1) {
         if (docIn == null) {
           // lazy init
@@ -685,18 +641,19 @@
         indexHasPayloads == fieldInfo.hasPayloads();
     }
     
-    public DocsAndPositionsEnum reset(Bits liveDocs, IntBlockTermState termState) throws IOException {
+    public DocsAndPositionsEnum reset(Bits liveDocs, BlockTermState state) throws IOException {
+      final Lucene41MetaData meta = (Lucene41MetaData) state.meta;
       this.liveDocs = liveDocs;
       // if (DEBUG) {
-      //   System.out.println("  FPR.reset: termState=" + termState);
+      //   System.out.println("  FPR.reset: state=" + state);
       // }
-      docFreq = termState.docFreq;
-      docTermStartFP = termState.docStartFP;
-      posTermStartFP = termState.posStartFP;
-      payTermStartFP = termState.payStartFP;
-      skipOffset = termState.skipOffset;
-      totalTermFreq = termState.totalTermFreq;
-      singletonDocID = termState.singletonDocID;
+      docFreq = state.docFreq;
+      totalTermFreq = state.totalTermFreq;
+      docTermStartFP = meta.docFP();
+      posTermStartFP = meta.posFP();
+      payTermStartFP = meta.payFP();
+      skipOffset = meta.skipOffset();
+      singletonDocID = meta.singletonDocID();
       if (docFreq > 1) {
         if (docIn == null) {
           // lazy init
@@ -706,12 +663,12 @@
       }
       posPendingFP = posTermStartFP;
       posPendingCount = 0;
-      if (termState.totalTermFreq < BLOCK_SIZE) {
+      if (state.totalTermFreq < BLOCK_SIZE) {
         lastPosBlockFP = posTermStartFP;
-      } else if (termState.totalTermFreq == BLOCK_SIZE) {
+      } else if (state.totalTermFreq == BLOCK_SIZE) {
         lastPosBlockFP = -1;
       } else {
-        lastPosBlockFP = posTermStartFP + termState.lastPosBlockOffset;
+        lastPosBlockFP = posTermStartFP + meta.lastPosBlockOffset();
       }
 
       doc = -1;
@@ -1142,18 +1099,19 @@
         indexHasPayloads == fieldInfo.hasPayloads();
     }
     
-    public EverythingEnum reset(Bits liveDocs, IntBlockTermState termState, int flags) throws IOException {
+    public EverythingEnum reset(Bits liveDocs, BlockTermState state, int flags) throws IOException {
+      final Lucene41MetaData meta = (Lucene41MetaData) state.meta;
       this.liveDocs = liveDocs;
       // if (DEBUG) {
-      //   System.out.println("  FPR.reset: termState=" + termState);
+      //   System.out.println("  FPR.reset: state=" + state);
       // }
-      docFreq = termState.docFreq;
-      docTermStartFP = termState.docStartFP;
-      posTermStartFP = termState.posStartFP;
-      payTermStartFP = termState.payStartFP;
-      skipOffset = termState.skipOffset;
-      totalTermFreq = termState.totalTermFreq;
-      singletonDocID = termState.singletonDocID;
+      docFreq = state.docFreq;
+      totalTermFreq = state.totalTermFreq;
+      docTermStartFP = meta.docFP();
+      posTermStartFP = meta.posFP();
+      payTermStartFP = meta.payFP();
+      skipOffset = meta.skipOffset();
+      singletonDocID = meta.singletonDocID();
       if (docFreq > 1) {
         if (docIn == null) {
           // lazy init
@@ -1164,12 +1122,12 @@
       posPendingFP = posTermStartFP;
       payPendingFP = payTermStartFP;
       posPendingCount = 0;
-      if (termState.totalTermFreq < BLOCK_SIZE) {
+      if (state.totalTermFreq < BLOCK_SIZE) {
         lastPosBlockFP = posTermStartFP;
-      } else if (termState.totalTermFreq == BLOCK_SIZE) {
+      } else if (state.totalTermFreq == BLOCK_SIZE) {
         lastPosBlockFP = -1;
       } else {
-        lastPosBlockFP = posTermStartFP + termState.lastPosBlockOffset;
+        lastPosBlockFP = posTermStartFP + meta.lastPosBlockOffset();
       }
 
       this.needsOffsets = (flags & DocsAndPositionsEnum.FLAG_OFFSETS) != 0;
Index: lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41PostingsWriter.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41PostingsWriter.java	(revision 1492635)
+++ lucene/core/src/java/org/apache/lucene/codecs/lucene41/Lucene41PostingsWriter.java	(working copy)
@@ -348,26 +348,8 @@
     }
   }
 
-  private static class PendingTerm {
-    public final long docStartFP;
-    public final long posStartFP;
-    public final long payStartFP;
-    public final long skipOffset;
-    public final long lastPosBlockOffset;
-    public final int singletonDocID;
+  private final List<Lucene41MetaData> pendingTerms = new ArrayList<Lucene41MetaData>();
 
-    public PendingTerm(long docStartFP, long posStartFP, long payStartFP, long skipOffset, long lastPosBlockOffset, int singletonDocID) {
-      this.docStartFP = docStartFP;
-      this.posStartFP = posStartFP;
-      this.payStartFP = payStartFP;
-      this.skipOffset = skipOffset;
-      this.lastPosBlockOffset = lastPosBlockOffset;
-      this.singletonDocID = singletonDocID;
-    }
-  }
-
-  private final List<PendingTerm> pendingTerms = new ArrayList<PendingTerm>();
-
   /** Called when we are done adding docs to this term */
   @Override
   public void finishTerm(TermStats stats) throws IOException {
@@ -517,7 +499,7 @@
     //   System.out.println("  payStartFP=" + payStartFP);
     // }
 
-    pendingTerms.add(new PendingTerm(docTermStartFP, posTermStartFP, payStartFP, skipOffset, lastPosBlockOffset, singletonDocID));
+    pendingTerms.add(new Lucene41MetaData(docTermStartFP, posTermStartFP, payStartFP, skipOffset, lastPosBlockOffset, singletonDocID));
     docBufferUpto = 0;
     posBufferUpto = 0;
     lastDocID = 0;
@@ -543,29 +525,29 @@
     long lastPosStartFP = 0;
     long lastPayStartFP = 0;
     for(int idx=limit-count; idx<limit; idx++) {
-      PendingTerm term = pendingTerms.get(idx);
+      Lucene41MetaData term = pendingTerms.get(idx);
 
-      if (term.singletonDocID == -1) {
-        bytesWriter.writeVLong(term.docStartFP - lastDocStartFP);
-        lastDocStartFP = term.docStartFP;
+      if (term.singletonDocID() == -1) {
+        bytesWriter.writeVLong(term.docFP() - lastDocStartFP);
+        lastDocStartFP = term.docFP();
       } else {
-        bytesWriter.writeVInt(term.singletonDocID);
+        bytesWriter.writeVInt(term.singletonDocID());
       }
 
       if (fieldHasPositions) {
-        bytesWriter.writeVLong(term.posStartFP - lastPosStartFP);
-        lastPosStartFP = term.posStartFP;
-        if (term.lastPosBlockOffset != -1) {
-          bytesWriter.writeVLong(term.lastPosBlockOffset);
+        bytesWriter.writeVLong(term.posFP() - lastPosStartFP);
+        lastPosStartFP = term.posFP();
+        if (term.lastPosBlockOffset() != -1) {
+          bytesWriter.writeVLong(term.lastPosBlockOffset());
         }
-        if ((fieldHasPayloads || fieldHasOffsets) && term.payStartFP != -1) {
-          bytesWriter.writeVLong(term.payStartFP - lastPayStartFP);
-          lastPayStartFP = term.payStartFP;
+        if ((fieldHasPayloads || fieldHasOffsets) && term.payFP() != -1) {
+          bytesWriter.writeVLong(term.payFP() - lastPayStartFP);
+          lastPayStartFP = term.payFP();
         }
       }
 
-      if (term.skipOffset != -1) {
-        bytesWriter.writeVLong(term.skipOffset);
+      if (term.skipOffset() != -1) {
+        bytesWriter.writeVLong(term.skipOffset());
       }
     }
 
