Index: lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java	(revision 1141501)
+++ lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java	(revision )
@@ -57,9 +57,11 @@
 public class FieldCacheImpl implements FieldCache {  // Made Public so that 
 	
   private Map<Class<?>,Cache> caches;
-  FieldCacheImpl() {
+
+  public FieldCacheImpl() {
     init();
   }
+
   private synchronized void init() {
     caches = new HashMap<Class<?>,Cache>(7);
     caches.put(Byte.TYPE, new Cache<ByteValues>(this));
Index: lucene/src/java/org/apache/lucene/index/IndexReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/IndexReader.java	(revision 1145594)
+++ lucene/src/java/org/apache/lucene/index/IndexReader.java	(revision )
@@ -1582,7 +1582,11 @@
   public int getTermInfosIndexDivisor() {
     throw new UnsupportedOperationException("This reader does not support this method.");
   }
-  
+
+  public FieldCache getFieldCache() {
+    throw new UnsupportedOperationException("This reader does not support this method.");
+  }
+
   public final IndexDocValues docValues(String field) throws IOException {
     final PerDocValues perDoc = perDocValues();
     if (perDoc == null) {
Index: lucene/src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SegmentReader.java	(revision 1148938)
+++ lucene/src/java/org/apache/lucene/index/SegmentReader.java	(revision )
@@ -18,27 +18,21 @@
  */
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.io.PrintStream;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.FieldSelector;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.index.codecs.PerDocValues;
+import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.search.cache.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.util.BitVector;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CloseableThreadLocal;
-import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.*;
 
 /**
  * @lucene.experimental
@@ -72,8 +66,34 @@
 
   SegmentCoreReaders core;
 
+  private final AtomicFieldCache segmentCache;
+
+  public SegmentReader() {
+    this.segmentCache = new AtomicFieldCache();
+    this.readerFinishedListeners = new MapBackedSet<ReaderFinishedListener>(new ConcurrentHashMap<ReaderFinishedListener,Boolean>());
+    readerFinishedListeners.add(new ReaderFinishedListener() {
+
+      public void finished(IndexReader reader) {
+        segmentCache.purgeAllCaches();
+      }
+
+    });
+  }
+
+  public SegmentReader(AtomicFieldCache segmentCache) {
+    this.segmentCache = segmentCache;
+    this.readerFinishedListeners = new MapBackedSet<ReaderFinishedListener>(new ConcurrentHashMap<ReaderFinishedListener,Boolean>());
+    readerFinishedListeners.add(new ReaderFinishedListener() {
+
+      public void finished(IndexReader reader) {
+        SegmentReader.this.segmentCache.purgeAllCaches();
+      }
+
+    });
+  }
+
   /**
-   * Sets the initial value 
+   * Sets the initial value
    */
   private class FieldsReaderLocal extends CloseableThreadLocal<FieldsReader> {
     @Override
@@ -83,7 +103,7 @@
   }
 
   Map<String,SegmentNorms> norms = new HashMap<String,SegmentNorms>();
-  
+
   /**
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
@@ -103,7 +123,7 @@
                                   int termInfosIndexDivisor,
                                   IOContext context)
     throws CorruptIndexException, IOException {
-    
+
     SegmentReader instance = new SegmentReader();
     instance.readOnly = readOnly;
     instance.si = si;
@@ -150,7 +170,7 @@
 
     // Verify # deletes does not exceed maxDoc for this
     // segment:
-    assert si.getDelCount() <= maxDoc() : 
+    assert si.getDelCount() <= maxDoc() :
       "delete count mismatch: " + recomputedCount + ") exceeds max doc (" + maxDoc() + ") for segment " + si.name;
 
     return true;
@@ -168,7 +188,7 @@
     } else
       assert si.getDelCount() == 0;
   }
-  
+
   /**
    * Clones the norm bytes.  May be overridden by subclasses.  New and experimental.
    * @param bytes Byte array to clone
@@ -179,7 +199,7 @@
     System.arraycopy(bytes, 0, cloneBytes, 0, bytes.length);
     return cloneBytes;
   }
-  
+
   /**
    * Clones the deleteDocs BitVector.  May be overridden by subclasses. New and experimental.
    * @param bv BitVector to clone
@@ -216,10 +236,10 @@
   }
 
   synchronized SegmentReader reopenSegment(SegmentInfo si, boolean doClone, boolean openReadOnly) throws CorruptIndexException, IOException {
-    boolean deletionsUpToDate = (this.si.hasDeletions() == si.hasDeletions()) 
+    boolean deletionsUpToDate = (this.si.hasDeletions() == si.hasDeletions())
                                   && (!si.hasDeletions() || this.si.getDelFileName().equals(si.getDelFileName()));
     boolean normsUpToDate = true;
-    
+
     Set<Integer> fieldNormsChanged = new HashSet<Integer>();
     for (FieldInfo fi : core.fieldInfos) {
       int fieldNumber = fi.number;
@@ -233,14 +253,14 @@
     // also if both old and new readers aren't readonly, we clone to avoid sharing modifications
     if (normsUpToDate && deletionsUpToDate && !doClone && openReadOnly && readOnly) {
       return this;
-    }    
+    }
 
     // When cloning, the incoming SegmentInfos should not
     // have any changes in it:
     assert !doClone || (normsUpToDate && deletionsUpToDate);
 
     // clone reader
-    SegmentReader clone = new SegmentReader();
+    SegmentReader clone = new SegmentReader(segmentCache);
 
     boolean success = false;
     try {
@@ -258,7 +278,7 @@
         clone.hasChanges = hasChanges;
         hasChanges = false;
       }
-      
+
       if (doClone) {
         if (liveDocs != null) {
           liveDocsRef.incrementAndGet();
@@ -302,7 +322,7 @@
         clone.decRef();
       }
     }
-    
+
     return clone;
   }
 
@@ -374,7 +394,7 @@
   protected void doClose() throws IOException {
     termVectorsLocal.close();
     fieldsReaderLocal.close();
-    
+
     if (liveDocs != null) {
       liveDocsRef.decrementAndGet();
       // null so if an app hangs on to us we still free most ram
@@ -450,7 +470,7 @@
   List<String> files() throws IOException {
     return new ArrayList<String>(si.files());
   }
-  
+
   FieldInfos fieldInfos() {
     return core.fieldInfos;
   }
@@ -559,7 +579,7 @@
     final SegmentNorms norm = norms.get(field);
     if (norm == null) {
       // not indexed, or norms not stored
-      return null;  
+      return null;
     }
     return norm.bytes();
   }
@@ -592,7 +612,7 @@
         if (!si.hasSeparateNorms(fi.number)) {
           d = cfsDir;
         }
-        
+
         // singleNormFile means multiple norms share this file
         boolean singleNormFile = IndexFileNames.matchesExtension(fileName, IndexFileNames.NORMS_EXTENSION);
         IndexInput normInput = null;
@@ -617,7 +637,7 @@
           // if the size is exactly equal to maxDoc to detect a headerless file.
           // NOTE: remove this check in Lucene 5.0!
           String version = si.getVersion();
-          final boolean isUnversioned = 
+          final boolean isUnversioned =
             (version == null || StringHelper.getVersionComparator().compare(version, "3.2") < 0)
             && normInput.length() == maxDoc();
           if (isUnversioned) {
@@ -676,7 +696,7 @@
   TermVectorsReader getTermVectorsReaderOrig() {
     return core.getTermVectorsReaderOrig();
   }
-  
+
   /** Return a term frequency vector for the specified document and field. The
    *  vector returned contains term numbers and frequencies for all terms in
    *  the specified field of this document, if the field had storeTermVector
@@ -688,13 +708,13 @@
     // Check if this field is invalid or has no stored term vector
     ensureOpen();
     FieldInfo fi = core.fieldInfos.fieldInfo(field);
-    if (fi == null || !fi.storeTermVector) 
+    if (fi == null || !fi.storeTermVector)
       return null;
-    
+
     TermVectorsReader termVectorsReader = getTermVectorsReader();
     if (termVectorsReader == null)
       return null;
-    
+
     return termVectorsReader.get(docNumber, field);
   }
 
@@ -737,14 +757,14 @@
   @Override
   public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
     ensureOpen();
-    
+
     TermVectorsReader termVectorsReader = getTermVectorsReader();
     if (termVectorsReader == null)
       return null;
-    
+
     return termVectorsReader.get(docNumber);
   }
-  
+
   /** {@inheritDoc} */
   @Override
   public String toString() {
@@ -755,7 +775,7 @@
     buffer.append(si.toString(core.dir, pendingDeleteCount));
     return buffer.toString();
   }
-  
+
   @Override
   public ReaderContext getTopReaderContext() {
     return readerContext;
@@ -767,7 +787,7 @@
   public String getSegmentName() {
     return core.segment;
   }
-  
+
   /**
    * Return the SegmentInfo of the segment this reader is reading.
    */
@@ -824,6 +844,11 @@
   }
 
   @Override
+  public FieldCache getFieldCache() {
+    return segmentCache;
+  }
+
+  @Override
   protected void readerFinished() {
     // Do nothing here -- we have more careful control on
     // when to notify that a SegmentReader has finished,
@@ -832,9 +857,318 @@
     // longer used (all SegmentReaders sharing it have been
     // closed).
   }
-  
+
   @Override
   public PerDocValues perDocValues() throws IOException {
     return core.perDocProducer;
   }
+
+  private class AtomicFieldCache implements FieldCache {
+
+    private final Map<Class<?>,Cache> cache;
+
+    private AtomicFieldCache() {
+      cache = new HashMap<Class<?>,Cache>(7);
+      cache.put(Byte.TYPE, new Cache<CachedArray.ByteValues>(this));
+      cache.put(Short.TYPE, new Cache<CachedArray.ShortValues>(this));
+      cache.put(Integer.TYPE, new Cache<CachedArray.IntValues>(this));
+      cache.put(Float.TYPE, new Cache<CachedArray.FloatValues>(this));
+      cache.put(Long.TYPE, new Cache<CachedArray.LongValues>(this));
+      cache.put(Double.TYPE, new Cache<CachedArray.DoubleValues>(this));
+      cache.put(DocTermsIndex.class, new Cache<DocTermsIndex>(this));
+      cache.put(DocTerms.class, new Cache<DocTerms>(this));
-}
+    }
+
+    // inherit javadocs
+    public byte[] getBytes (IndexReader reader, String field) throws IOException {
+      return getBytes(null, field, new ByteValuesCreator(field, null)).values;
+    }
+
+    // inherit javadocs
+    public byte[] getBytes(IndexReader reader, String field, ByteParser parser) throws IOException {
+      return getBytes(null, field, new ByteValuesCreator(field, parser)).values;
+    }
+
+    @SuppressWarnings("unchecked")
+    public CachedArray.ByteValues getBytes(IndexReader reader, String field, EntryCreator<CachedArray.ByteValues> creator ) throws IOException {
+      return (CachedArray.ByteValues) cache.get(Byte.TYPE).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    // inherit javadocs
+    public short[] getShorts (IndexReader reader, String field) throws IOException {
+      return getShorts(null, field, new ShortValuesCreator(field,null)).values;
+    }
+
+    // inherit javadocs
+    public short[] getShorts(IndexReader reader, String field, ShortParser parser) throws IOException {
+      return getShorts(null, field, new ShortValuesCreator(field,parser)).values;
+    }
+
+    @SuppressWarnings("unchecked")
+    public CachedArray.ShortValues getShorts(IndexReader reader, String field, EntryCreator<CachedArray.ShortValues> creator ) throws IOException {
+      return (CachedArray.ShortValues) cache.get(Short.TYPE).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    // inherit javadocs
+    public int[] getInts (IndexReader reader, String field) throws IOException {
+      return getInts(null, field, new IntValuesCreator( field, null )).values;
+    }
+
+    // inherit javadocs
+    public int[] getInts(IndexReader reader, String field, IntParser parser) throws IOException {
+      return getInts(null, field, new IntValuesCreator( field, parser )).values;
+    }
+
+    @SuppressWarnings("unchecked")
+    public CachedArray.IntValues getInts(IndexReader reader, String field, EntryCreator<CachedArray.IntValues> creator ) throws IOException {
+      return (CachedArray.IntValues) cache.get(Integer.TYPE).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    // inherit javadocs
+    public float[] getFloats (IndexReader reader, String field) throws IOException {
+      return getFloats(null, field, new FloatValuesCreator( field, null ) ).values;
+    }
+
+    // inherit javadocs
+    public float[] getFloats(IndexReader reader, String field, FloatParser parser) throws IOException {
+      return getFloats(null, field, new FloatValuesCreator( field, parser ) ).values;
+    }
+
+    @SuppressWarnings("unchecked")
+    public CachedArray.FloatValues getFloats(IndexReader reader, String field, EntryCreator<CachedArray.FloatValues> creator ) throws IOException {
+      return (CachedArray.FloatValues) cache.get(Float.TYPE).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    public long[] getLongs(IndexReader reader, String field) throws IOException {
+      return getLongs(null, field, new LongValuesCreator( field, null ) ).values;
+    }
+
+    // inherit javadocs
+    public long[] getLongs(IndexReader reader, String field, FieldCache.LongParser parser) throws IOException {
+      return getLongs(null, field, new LongValuesCreator( field, parser ) ).values;
+    }
+
+    @SuppressWarnings("unchecked")
+    public CachedArray.LongValues getLongs(IndexReader reader, String field, EntryCreator<CachedArray.LongValues> creator ) throws IOException {
+      return (CachedArray.LongValues) cache.get(Long.TYPE).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    // inherit javadocs
+    public double[] getDoubles(IndexReader reader, String field) throws IOException {
+      return getDoubles(null, field, new DoubleValuesCreator( field, null ) ).values;
+    }
+
+    // inherit javadocs
+    public double[] getDoubles(IndexReader reader, String field, FieldCache.DoubleParser parser) throws IOException {
+      return getDoubles(null, field, new DoubleValuesCreator( field, parser ) ).values;
+    }
+
+    @SuppressWarnings("unchecked")
+    public CachedArray.DoubleValues getDoubles(IndexReader reader, String field, EntryCreator<CachedArray.DoubleValues> creator ) throws IOException {
+      return (CachedArray.DoubleValues) cache.get(Double.TYPE).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    public DocTermsIndex getTermsIndex(IndexReader reader, String field) throws IOException {
+      return getTermsIndex(field, new DocTermsIndexCreator(field));
+    }
+
+    public DocTermsIndex getTermsIndex(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
+      return getTermsIndex(field, new DocTermsIndexCreator(field,
+          fasterButMoreRAM ? DocTermsIndexCreator.FASTER_BUT_MORE_RAM : 0));
+    }
+
+    @SuppressWarnings("unchecked")
+    public DocTermsIndex getTermsIndex(String field, EntryCreator<DocTermsIndex> creator) throws IOException {
+      return (DocTermsIndex) cache.get(DocTermsIndex.class).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    // TODO: if a DocTermsIndex was already created, we
+    // should share it...
+    public DocTerms getTerms(IndexReader reader, String field) throws IOException {
+      return getTerms(field, new DocTermsCreator(field));
+    }
+
+    public DocTerms getTerms(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
+      return getTerms(field, new DocTermsCreator(field,
+          fasterButMoreRAM ? DocTermsCreator.FASTER_BUT_MORE_RAM : 0));
+    }
+
+    @SuppressWarnings("unchecked")
+    public DocTerms getTerms(String field, EntryCreator<DocTerms> creator) throws IOException {
+      return (DocTerms) cache.get(DocTerms.class).get(SegmentReader.this, new Entry(field, creator));
+    }
+
+    private volatile PrintStream infoStream;
+
+    public void setInfoStream(PrintStream stream) {
+      infoStream = stream;
+    }
+
+    public PrintStream getInfoStream() {
+      return infoStream;
+    }
+
+    public CacheEntry[] getCacheEntries() {
+      List<CacheEntry> result = new ArrayList<CacheEntry>(17);
+      for(final Map.Entry<Class<?>,Cache> cacheEntry: cache.entrySet()) {
+        final Class<?> cacheType = cacheEntry.getKey();
+        final Cache<?> cache = cacheEntry.getValue();
+        synchronized(cache.readerCache) {
+          for(final Map.Entry<?, Object> mapEntry : cache.readerCache.entrySet() ) {
+            Entry entry = (Entry) mapEntry.getKey();
+            result.add(new CacheEntryImpl(entry.field,cacheType, entry.creator, mapEntry.getValue()));
+          }
+        }
+      }
+      return result.toArray(new CacheEntry[result.size()]);
+    }
+
+    public void purgeAllCaches() {
+      for (Cache<?> c : cache.values()) { synchronized (c.readerCache) { c.readerCache.clear(); } }
+    }
+
+    public void purge(IndexReader r) {
+      purgeAllCaches();
+    }
+  }
+
+  /** Expert: Internal cache. */
+  final static class Cache<T> {
+
+    final FieldCache wrapper;
+    final Map<Entry<T>,Object> readerCache;
+
+    Cache(FieldCache wrapper) {
+      this.wrapper = wrapper;
+      this.readerCache = new HashMap<Entry<T>,Object>();
+    }
+
+    protected Object createValue(IndexReader reader, Entry entryKey) throws IOException {
+      return entryKey.creator.create(reader);
+    }
+
+    @SuppressWarnings("unchecked")
+    public Object get(IndexReader reader, Entry<T> key) throws IOException {
+      Object value;
+      synchronized (readerCache) {
+        value = readerCache.get(key);
+        if (value == null) {
+          value = new FieldCache.CreationPlaceholder();
+          readerCache.put(key, value);
+        }
+      }
+      if (value instanceof FieldCache.CreationPlaceholder) {
+        synchronized (value) {
+          FieldCache.CreationPlaceholder progress = (FieldCache.CreationPlaceholder) value;
+          if (progress.value != null) {
+            return progress.value;
+          }
+          progress.value = createValue(reader, key);
+          synchronized (readerCache) {
+            readerCache.put(key, progress.value);
+          }
+
+          // Only check if key.custom (the parser) is
+          // non-null; else, we check twice for a single
+          // call to FieldCache.getXXX
+          if (key.creator != null && wrapper != null) {
+            final PrintStream infoStream = wrapper.getInfoStream();
+            if (infoStream != null) {
+              printNewInsanity(infoStream, progress.value);
+            }
+          }
+        }
+      }
+
+      // Validate new entries
+      if( key.creator != null && key.creator.shouldValidate() ) {
+        key.creator.validate( (T)value, reader);
+      }
+      return value;
+    }
+
+    private void printNewInsanity(PrintStream infoStream, Object value) {
+      final FieldCacheSanityChecker.Insanity[] insanities = FieldCacheSanityChecker.checkSanity(wrapper);
+      for(int i=0;i<insanities.length;i++) {
+        final FieldCacheSanityChecker.Insanity insanity = insanities[i];
+        final FieldCache.CacheEntry[] entries = insanity.getCacheEntries();
+        for(int j=0;j<entries.length;j++) {
+          if (entries[j].getValue() == value) {
+            // OK this insanity involves our entry
+            infoStream.println("WARNING: new FieldCache insanity created\nDetails: " + insanity.toString());
+            infoStream.println("\nStack:\n");
+            new Throwable().printStackTrace(infoStream);
+            break;
+          }
+        }
+      }
+    }
+  }
+
+  /** Expert: Every composite-key in the internal cache is of this type. */
+  static class Entry<T> {
+    final String field;        // which Fieldable
+    final EntryCreator<T> creator;       // which custom comparator or parser
+
+    /** Creates one of these objects for a custom comparator/parser. */
+    Entry (String field, EntryCreator<T> custom) {
+      this.field = field;
+      this.creator = custom;
+    }
+
+    /** Two of these are equal iff they reference the same field and type. */
+    @Override
+    public boolean equals (Object o) {
+      if (o instanceof Entry) {
+        Entry other = (Entry) o;
+        if (other.field.equals(field)) {
+          if (other.creator == null) {
+            if (creator == null) return true;
+          } else if (other.creator.equals (creator)) {
+            return true;
+          }
+        }
+      }
+      return false;
+    }
+
+    /** Composes a hashcode based on the field and type. */
+    @Override
+    public int hashCode() {
+      return field.hashCode() ^ (creator==null ? 0 : creator.hashCode());
+    }
+  }
+
+  private final class CacheEntryImpl extends FieldCache.CacheEntry {
+
+    private final String fieldName;
+    private final Class<?> cacheType;
+    private final EntryCreator custom;
+    private final Object value;
+
+    CacheEntryImpl(String fieldName,
+                   Class<?> cacheType,
+                   EntryCreator custom,
+                   Object value) {
+        this.fieldName = fieldName;
+        this.cacheType = cacheType;
+        this.custom = custom;
+        this.value = value;
+    }
+
+    @Override
+    public Object getReaderKey() { return SegmentReader.this.getCoreCacheKey(); }
+
+    @Override
+    public String getFieldName() { return fieldName; }
+
+    @Override
+    public Class<?> getCacheType() { return cacheType; }
+
+    @Override
+    public Object getCustom() { return custom; }
+
+    @Override
+    public Object getValue() { return value; }
+  }
+}
\ No newline at end of file
Index: lucene/src/java/org/apache/lucene/index/SlowMultiReaderWrapper.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SlowMultiReaderWrapper.java	(revision 1143415)
+++ lucene/src/java/org/apache/lucene/index/SlowMultiReaderWrapper.java	(revision )
@@ -21,6 +21,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.search.FieldCacheImpl;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ReaderUtil; // javadoc
 
@@ -52,9 +54,11 @@
 
 public final class SlowMultiReaderWrapper extends FilterIndexReader {
 
+  private final static FieldCache DEFAULT = new FieldCacheImpl(); // NOTE(review): static, shared by all wrappers and never purged on reader close — confirm this does not leak
+
   private final ReaderContext readerContext;
   private final Map<String,byte[]> normsCache = new HashMap<String,byte[]>();
-  
+
   public SlowMultiReaderWrapper(IndexReader other) {
     super(other);
     readerContext = new AtomicReaderContext(this); // emulate atomic reader!
@@ -114,4 +118,9 @@
     }
     in.doSetNorm(n, field, value);
   }
+
+  @Override
+  public FieldCache getFieldCache() {
+    return DEFAULT;
-}
+  }
+}
Index: lucene/src/java/org/apache/lucene/search/FieldCache.java
===================================================================
--- lucene/src/java/org/apache/lucene/search/FieldCache.java	(revision 1068526)
+++ lucene/src/java/org/apache/lucene/search/FieldCache.java	(revision )
@@ -19,6 +19,7 @@
 
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.cache.CachedArray;
 import org.apache.lucene.search.cache.EntryCreator;
 import org.apache.lucene.search.cache.CachedArray.*;
 import org.apache.lucene.util.NumericUtils;
@@ -44,7 +45,7 @@
 public interface FieldCache {
 
   public static final class CreationPlaceholder {
-    Object value;
+    public Object value;
   }
 
   /**
@@ -112,6 +113,7 @@
   }
 
   /** Expert: The cache used internally by sorting and range query classes. */
+  @Deprecated // superseded by per-reader caches via IndexReader.getFieldCache(); kept for back-compat
   public static FieldCache DEFAULT = new FieldCacheImpl();
 
   /** The default parser for byte values, which are encoded by {@link Byte#toString(byte)} */
