Index: lucene/src/java/org/apache/lucene/index/SlowMultiReaderWrapper.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SlowMultiReaderWrapper.java (revision 1143415)
+++ lucene/src/java/org/apache/lucene/index/SlowMultiReaderWrapper.java (revision )
@@ -17,16 +17,18 @@
* limitations under the License.
*/
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
+import org.apache.lucene.index.codecs.PerDocValues;
+import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.search.cache.*;
import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.ReaderUtil; // javadoc
+import org.apache.lucene.util.FieldCacheSanityChecker;
+import org.apache.lucene.util.MapBackedSet;
+import org.apache.lucene.util.ReaderUtil;
-import org.apache.lucene.index.DirectoryReader; // javadoc
-import org.apache.lucene.index.MultiReader; // javadoc
-import org.apache.lucene.index.codecs.PerDocValues;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
/**
* This class forces a composite reader (eg a {@link
@@ -37,29 +39,41 @@
* APIs on-the-fly, using the static methods in {@link
* MultiFields}, by stepping through the sub-readers to
* merge fields/terms, appending docs, etc.
- *
+ *
* If you ever hit an UnsupportedOperationException saying
* "please use MultiFields.XXX instead", the simple
* but non-performant workaround is to wrap your reader
* using this class.
- *
+ *
* NOTE : this class almost always results in a
* performance hit. If this is important to your use case,
* it's better to get the sequential sub readers (see {@link
* ReaderUtil#gatherSubReaders}), instead, and iterate through them
* yourself.
*/
-
public final class SlowMultiReaderWrapper extends FilterIndexReader {
+ private final static InsaneFieldCache insaneFieldCache = new InsaneFieldCache();
+
private final ReaderContext readerContext;
- private final Map<String,byte[]> normsCache = new HashMap<String,byte[]>();
+ private final Map<String,byte[]> normsCache = new HashMap<String,byte[]>();
-
+
public SlowMultiReaderWrapper(IndexReader other) {
super(other);
readerContext = new AtomicReaderContext(this); // emulate atomic reader!
+
+ if (in.readerFinishedListeners == null) {
+ in.readerFinishedListeners = new MapBackedSet<ReaderFinishedListener>(new ConcurrentHashMap<ReaderFinishedListener,Boolean>());
- }
+ }
+ in.addReaderFinishedListener(new ReaderFinishedListener() {
+ public void finished(IndexReader reader) {
+ insaneFieldCache.purge(reader);
+ }
+
+ });
+ }
+
@Override
public String toString() {
return "SlowMultiReaderWrapper(" + in + ")";
@@ -79,7 +93,7 @@
public Bits getLiveDocs() {
return MultiFields.getLiveDocs(in);
}
-
+
@Override
public IndexReader[] getSequentialSubReaders() {
return null;
@@ -95,23 +109,528 @@
return null;
if (normsCache.containsKey(field)) // cached omitNorms, not missing key
return null;
-
+
bytes = MultiNorms.norms(in, field);
normsCache.put(field, bytes);
return bytes;
}
-
+
@Override
public ReaderContext getTopReaderContext() {
return readerContext;
}
-
+
@Override
- protected void doSetNorm(int n, String field, byte value)
- throws CorruptIndexException, IOException {
+ protected void doSetNorm(int n, String field, byte value) throws CorruptIndexException, IOException {
synchronized(normsCache) {
normsCache.remove(field);
}
in.doSetNorm(n, field, value);
}
+
+ @Override
+ public AtomicFieldCache getFieldCache() {
+ return new AtomicFieldCache() {
+
+ public byte[] getBytes(String field) throws IOException {
+ return insaneFieldCache.getBytes(in, field);
-}
+ }
+
+ public byte[] getBytes(String field, FieldCache.ByteParser parser) throws IOException {
+ return insaneFieldCache.getBytes(in, field, parser);
+ }
+
+ public CachedArray.ByteValues getBytes(String field, EntryCreator creator) throws IOException {
+ return insaneFieldCache.getBytes(in, field, creator);
+ }
+
+ public short[] getShorts(String field) throws IOException {
+ return insaneFieldCache.getShorts(in, field);
+ }
+
+ public short[] getShorts(String field, FieldCache.ShortParser parser) throws IOException {
+ return insaneFieldCache.getShorts(in, field, parser);
+ }
+
+ public CachedArray.ShortValues getShorts(String field, EntryCreator creator) throws IOException {
+ return insaneFieldCache.getShorts(in, field, creator);
+ }
+
+ public int[] getInts(String field) throws IOException {
+ return insaneFieldCache.getInts(in, field);
+ }
+
+ public int[] getInts(String field, FieldCache.IntParser parser) throws IOException {
+ return insaneFieldCache.getInts(in, field, parser);
+ }
+
+ public CachedArray.IntValues getInts(String field, EntryCreator creator) throws IOException {
+ return insaneFieldCache.getInts(in, field, creator);
+ }
+
+ public float[] getFloats(String field) throws IOException {
+ return insaneFieldCache.getFloats(in, field);
+ }
+
+ public float[] getFloats(String field, FieldCache.FloatParser parser) throws IOException {
+ return insaneFieldCache.getFloats(in, field, parser);
+ }
+
+ public CachedArray.FloatValues getFloats(String field, EntryCreator creator) throws IOException {
+ return insaneFieldCache.getFloats(in, field, creator);
+ }
+
+ public long[] getLongs(String field) throws IOException {
+ return insaneFieldCache.getLongs(in, field);
+ }
+
+ public long[] getLongs(String field, FieldCache.LongParser parser) throws IOException {
+ return insaneFieldCache.getLongs(in, field, parser);
+ }
+
+ public CachedArray.LongValues getLongs(String field, EntryCreator creator) throws IOException {
+ return insaneFieldCache.getLongs(in, field, creator);
+ }
+
+ public double[] getDoubles(String field) throws IOException {
+ return insaneFieldCache.getDoubles(in, field);
+ }
+
+ public double[] getDoubles(String field, FieldCache.DoubleParser parser) throws IOException {
+ return insaneFieldCache.getDoubles(in, field, parser);
+ }
+
+ public CachedArray.DoubleValues getDoubles(String field, EntryCreator creator) throws IOException {
+ return insaneFieldCache.getDoubles(in, field, creator);
+ }
+
+ public FieldCache.DocTerms getTerms(String field) throws IOException {
+ return insaneFieldCache.getTerms(in, field);
+ }
+
+ public FieldCache.DocTerms getTerms(String field, boolean fasterButMoreRAM) throws IOException {
+ return insaneFieldCache.getTerms(in, field, fasterButMoreRAM);
+ }
+
+ public FieldCache.DocTermsIndex getTermsIndex(String field) throws IOException {
+ return insaneFieldCache.getTermsIndex(in, field);
+ }
+
+ public FieldCache.DocTermsIndex getTermsIndex(String field, boolean fasterButMoreRAM) throws IOException {
+ return insaneFieldCache.getTermsIndex(in, field, fasterButMoreRAM);
+ }
+
+ public FieldCache.CacheEntry[] getCacheEntries() {
+ return insaneFieldCache.getCacheEntries();
+ }
+
+ public void purgeCache() {
+ insaneFieldCache.purge(in);
+ }
+
+ public void setInfoStream(PrintStream stream) {
+ insaneFieldCache.setInfoStream(stream);
+ }
+
+ public PrintStream getInfoStream() {
+ return insaneFieldCache.getInfoStream();
+ }
+ };
+ }
+
+ public static synchronized FieldCache.CacheEntry[] getCacheEntries() {
+ return insaneFieldCache.getCacheEntries();
+ }
+
+ public static synchronized void purgeAllCaches() {
+ insaneFieldCache.purgeAllCaches();
+ }
+
+ public static void setInfoStream(PrintStream stream) {
+ insaneFieldCache.setInfoStream(stream);
+ }
+
+ public static PrintStream getInfoStream() {
+ return insaneFieldCache.getInfoStream();
+ }
+
+ private static class InsaneFieldCache {
+
+ private Map<Class<?>, Cache> caches;
+
+ InsaneFieldCache() {
+ init();
+ }
+
+ private synchronized void init() {
+ caches = new HashMap<Class<?>, Cache>(7);
+ caches.put(Byte.TYPE, new Cache(this));
+ caches.put(Short.TYPE, new Cache(this));
+ caches.put(Integer.TYPE, new Cache(this));
+ caches.put(Float.TYPE, new Cache(this));
+ caches.put(Long.TYPE, new Cache(this));
+ caches.put(Double.TYPE, new Cache(this));
+ caches.put(FieldCache.DocTermsIndex.class, new Cache(this));
+ caches.put(FieldCache.DocTerms.class, new Cache(this));
+ }
+
+ public synchronized void purgeAllCaches() {
+ init();
+ }
+
+ public synchronized void purge(IndexReader r) {
+ for (Cache c : caches.values()) {
+ c.purge(r);
+ }
+ }
+
+ public synchronized FieldCache.CacheEntry[] getCacheEntries() {
+ List<FieldCache.CacheEntry> result = new ArrayList<FieldCache.CacheEntry>(17);
+ for (final Map.Entry<Class<?>, Cache> cacheEntry : caches.entrySet()) {
+ final Cache<?> cache = cacheEntry.getValue();
+ final Class<?> cacheType = cacheEntry.getKey();
+ synchronized (cache.readerCache) {
+ for (Object readerKey : cache.readerCache.keySet()) {
+ Map<Entry<?>, Object> innerCache = cache.readerCache.get(readerKey);
+ for (final Map.Entry<Entry<?>, Object> mapEntry : innerCache.entrySet()) {
+ Entry entry = (Entry) mapEntry.getKey();
+ result.add(new CacheEntryImpl(readerKey, entry.field,
+ cacheType, entry.creator,
+ mapEntry.getValue()));
+ }
+ }
+ }
+ }
+ return result.toArray(new FieldCache.CacheEntry[result.size()]);
+ }
+
+ private static final class CacheEntryImpl extends FieldCache.CacheEntry {
+ private final Object readerKey;
+ private final String fieldName;
+ private final Class<?> cacheType;
+ private final EntryCreator custom;
+ private final Object value;
+
+ CacheEntryImpl(Object readerKey, String fieldName,
+ Class<?> cacheType,
+ EntryCreator custom,
+ Object value) {
+ this.readerKey = readerKey;
+ this.fieldName = fieldName;
+ this.cacheType = cacheType;
+ this.custom = custom;
+ this.value = value;
+
+ // :HACK: for testing.
+// if (null != locale || SortField.CUSTOM != sortFieldType) {
+// throw new RuntimeException("Locale/sortFieldType: " + this);
+// }
+
+ }
+
+ @Override
+ public Object getReaderKey() {
+ return readerKey;
+ }
+
+ @Override
+ public String getFieldName() {
+ return fieldName;
+ }
+
+ @Override
+ public Class<?> getCacheType() {
+ return cacheType;
+ }
+
+ @Override
+ public Object getCustom() {
+ return custom;
+ }
+
+ @Override
+ public Object getValue() {
+ return value;
+ }
+ }
+
+ final static IndexReader.ReaderFinishedListener purgeReader = new IndexReader.ReaderFinishedListener() {
+ @Override
+ public void finished(IndexReader reader) {
+ insaneFieldCache.purge(reader);
+ }
+ };
+
+ /**
+ * Expert: Internal cache.
+ */
+ final static class Cache<T> {
+ Cache() {
+ this.wrapper = null;
+ }
+
+ Cache(InsaneFieldCache wrapper) {
+ this.wrapper = wrapper;
+ }
+
+ final InsaneFieldCache wrapper;
+
+ final Map<Object, Map<Entry<?>, Object>> readerCache = new WeakHashMap<Object, Map<Entry<?>, Object>>();
+
+ protected Object createValue(IndexReader reader, Entry entryKey) throws IOException {
+ return entryKey.creator.create(reader);
+ }
+
+ /**
+ * Remove this reader from the cache, if present.
+ */
+ public void purge(IndexReader r) {
+ Object readerKey = r.getCoreCacheKey();
+ synchronized (readerCache) {
+ readerCache.remove(readerKey);
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object get(IndexReader reader, Entry key) throws IOException {
+ Map<Entry<?>, Object> innerCache;
+ Object value;
+ final Object readerKey = reader.getCoreCacheKey();
+ synchronized (readerCache) {
+ innerCache = readerCache.get(readerKey);
+ if (innerCache == null) {
+ // First time this reader is using FieldCache
+ innerCache = new HashMap<Entry<?>, Object>();
+ readerCache.put(readerKey, innerCache);
+ reader.addReaderFinishedListener(purgeReader);
+ value = null;
+ } else {
+ value = innerCache.get(key);
+ }
+ if (value == null) {
+ value = new FieldCache.CreationPlaceholder();
+ innerCache.put(key, value);
+ }
+ }
+ if (value instanceof FieldCache.CreationPlaceholder) {
+ synchronized (value) {
+ FieldCache.CreationPlaceholder progress = (FieldCache.CreationPlaceholder) value;
+ if (progress.value == null) {
+ progress.value = createValue(reader, key);
+ synchronized (readerCache) {
+ innerCache.put(key, progress.value);
+ }
+
+ // Only check if key.custom (the parser) is
+ // non-null; else, we check twice for a single
+ // call to FieldCache.getXXX
+ if (key.creator != null && wrapper != null) {
+ final PrintStream infoStream = wrapper.getInfoStream();
+ if (infoStream != null) {
+ printNewInsanity(infoStream, progress.value);
+ }
+ }
+ }
+ return progress.value;
+ }
+ }
+
+ // Validate new entries
+ if (key.creator.shouldValidate()) {
+ key.creator.validate((T) value, reader);
+ }
+ return value;
+ }
+
+ private void printNewInsanity(PrintStream infoStream, Object value) {
+ final FieldCacheSanityChecker.Insanity[] insanities = FieldCacheSanityChecker.checkSanity(wrapper.getCacheEntries());
+ for (int i = 0; i < insanities.length; i++) {
+ final FieldCacheSanityChecker.Insanity insanity = insanities[i];
+ final FieldCache.CacheEntry[] entries = insanity.getCacheEntries();
+ for (int j = 0; j < entries.length; j++) {
+ if (entries[j].getValue() == value) {
+ // OK this insanity involves our entry
+ infoStream.println("WARNING: new FieldCache insanity created\nDetails: " + insanity.toString());
+ infoStream.println("\nStack:\n");
+ new Throwable().printStackTrace(infoStream);
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Expert: Every composite-key in the internal cache is of this type.
+ */
+ static class Entry<T> {
+ final String field; // which Fieldable
+ final EntryCreator<T> creator; // which custom comparator or parser
+
+ /**
+ * Creates one of these objects for a custom comparator/parser.
+ */
+ Entry(String field, EntryCreator<T> custom) {
+ this.field = field;
+ this.creator = custom;
+ }
+
+ /**
+ * Two of these are equal iff they reference the same field and type.
+ */
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof Entry) {
+ Entry other = (Entry) o;
+ if (other.field.equals(field)) {
+ if (other.creator == null) {
+ if (creator == null) return true;
+ } else if (other.creator.equals(creator)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Composes a hashcode based on the field and type.
+ */
+ @Override
+ public int hashCode() {
+ return field.hashCode() ^ (creator == null ? 0 : creator.hashCode());
+ }
+ }
+
+ // inherit javadocs
+ public byte[] getBytes(IndexReader reader, String field) throws IOException {
+ return getBytes(reader, field, new ByteValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public byte[] getBytes(IndexReader reader, String field, FieldCache.ByteParser parser) throws IOException {
+ return getBytes(reader, field, new ByteValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.ByteValues getBytes(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (CachedArray.ByteValues) caches.get(Byte.TYPE).get(reader, new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public short[] getShorts(IndexReader reader, String field) throws IOException {
+ return getShorts(reader, field, new ShortValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public short[] getShorts(IndexReader reader, String field, FieldCache.ShortParser parser) throws IOException {
+ return getShorts(reader, field, new ShortValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.ShortValues getShorts(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (CachedArray.ShortValues) caches.get(Short.TYPE).get(reader, new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public int[] getInts(IndexReader reader, String field) throws IOException {
+ return getInts(reader, field, new IntValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public int[] getInts(IndexReader reader, String field, FieldCache.IntParser parser) throws IOException {
+ return getInts(reader, field, new IntValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.IntValues getInts(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (CachedArray.IntValues) caches.get(Integer.TYPE).get(reader, new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public float[] getFloats(IndexReader reader, String field) throws IOException {
+ return getFloats(reader, field, new FloatValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public float[] getFloats(IndexReader reader, String field, FieldCache.FloatParser parser) throws IOException {
+ return getFloats(reader, field, new FloatValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.FloatValues getFloats(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (CachedArray.FloatValues) caches.get(Float.TYPE).get(reader, new Entry(field, creator));
+ }
+
+ public long[] getLongs(IndexReader reader, String field) throws IOException {
+ return getLongs(reader, field, new LongValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public long[] getLongs(IndexReader reader, String field, FieldCache.LongParser parser) throws IOException {
+ return getLongs(reader, field, new LongValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.LongValues getLongs(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (CachedArray.LongValues) caches.get(Long.TYPE).get(reader, new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public double[] getDoubles(IndexReader reader, String field) throws IOException {
+ return getDoubles(reader, field, new DoubleValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public double[] getDoubles(IndexReader reader, String field, FieldCache.DoubleParser parser) throws IOException {
+ return getDoubles(reader, field, new DoubleValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.DoubleValues getDoubles(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (CachedArray.DoubleValues) caches.get(Double.TYPE).get(reader, new Entry(field, creator));
+ }
+
+ public FieldCache.DocTermsIndex getTermsIndex(IndexReader reader, String field) throws IOException {
+ return getTermsIndex(reader, field, new DocTermsIndexCreator(field));
+ }
+
+ public FieldCache.DocTermsIndex getTermsIndex(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
+ return getTermsIndex(reader, field, new DocTermsIndexCreator(field,
+ fasterButMoreRAM ? DocTermsIndexCreator.FASTER_BUT_MORE_RAM : 0));
+ }
+
+ @SuppressWarnings("unchecked")
+ public FieldCache.DocTermsIndex getTermsIndex(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (FieldCache.DocTermsIndex) caches.get(FieldCache.DocTermsIndex.class).get(reader, new Entry(field, creator));
+ }
+
+ // TODO: this if DocTermsIndex was already created, we
+ // should share it...
+ public FieldCache.DocTerms getTerms(IndexReader reader, String field) throws IOException {
+ return getTerms(reader, field, new DocTermsCreator(field));
+ }
+
+ public FieldCache.DocTerms getTerms(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
+ return getTerms(reader, field, new DocTermsCreator(field,
+ fasterButMoreRAM ? DocTermsCreator.FASTER_BUT_MORE_RAM : 0));
+ }
+
+ @SuppressWarnings("unchecked")
+ public FieldCache.DocTerms getTerms(IndexReader reader, String field, EntryCreator creator) throws IOException {
+ return (FieldCache.DocTerms) caches.get(FieldCache.DocTerms.class).get(reader, new Entry(field, creator));
+ }
+
+ private volatile PrintStream infoStream;
+
+ public void setInfoStream(PrintStream stream) {
+ infoStream = stream;
+ }
+
+ public PrintStream getInfoStream() {
+ return infoStream;
+ }
+
+ }
+
+}
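
As a reading aid rather than part of the patch: a minimal sketch of how the per-reader cache introduced above in SlowMultiReaderWrapper would be consumed. The index location and the "price" field are hypothetical placeholders; only SlowMultiReaderWrapper(IndexReader), getFieldCache() and AtomicFieldCache.getInts(String) come from the patch itself.

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.SlowMultiReaderWrapper;
import org.apache.lucene.search.cache.AtomicFieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

import java.io.File;
import java.io.IOException;

public class PerReaderCacheSketch {
  public static void main(String[] args) throws IOException {
    Directory dir = FSDirectory.open(new File(args[0])); // hypothetical index location
    IndexReader reader = IndexReader.open(dir);
    try {
      // Wrap the composite reader so it behaves like a single atomic reader, then
      // ask it (rather than the static FieldCache.DEFAULT) for cached values.
      AtomicFieldCache cache = new SlowMultiReaderWrapper(reader).getFieldCache();
      int[] prices = cache.getInts("price"); // "price" is a hypothetical indexed field
      System.out.println("cached " + prices.length + " values");
    } finally {
      reader.close();
    }
  }
}
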
Index: lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java (revision 1141501)
+++ lucene/src/java/org/apache/lucene/search/FieldCacheImpl.java (revision )
@@ -17,32 +17,15 @@
* limitations under the License.
*/
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.WeakHashMap;
-
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.SlowMultiReaderWrapper;
import org.apache.lucene.search.cache.ByteValuesCreator;
-import org.apache.lucene.search.cache.DocTermsCreator;
-import org.apache.lucene.search.cache.DocTermsIndexCreator;
-import org.apache.lucene.search.cache.DoubleValuesCreator;
+import org.apache.lucene.search.cache.CachedArray.*;
import org.apache.lucene.search.cache.EntryCreator;
-import org.apache.lucene.search.cache.FloatValuesCreator;
-import org.apache.lucene.search.cache.IntValuesCreator;
-import org.apache.lucene.search.cache.LongValuesCreator;
-import org.apache.lucene.search.cache.ShortValuesCreator;
-import org.apache.lucene.search.cache.CachedArray.ByteValues;
-import org.apache.lucene.search.cache.CachedArray.DoubleValues;
-import org.apache.lucene.search.cache.CachedArray.FloatValues;
-import org.apache.lucene.search.cache.CachedArray.IntValues;
-import org.apache.lucene.search.cache.CachedArray.LongValues;
-import org.apache.lucene.search.cache.CachedArray.ShortValues;
-import org.apache.lucene.util.FieldCacheSanityChecker;
+import java.io.IOException;
+import java.io.PrintStream;
+
/**
* Expert: The default cache implementation, storing all values in memory.
* A WeakHashMap is used for storage.
@@ -54,353 +37,134 @@
*
* @since lucene 1.4
*/
+@Deprecated
public class FieldCacheImpl implements FieldCache { // Made Public so that
-
+
- private Map<Class<?>,Cache> caches;
- FieldCacheImpl() {
- init();
- }
- private synchronized void init() {
- caches = new HashMap<Class<?>,Cache>(7);
- caches.put(Byte.TYPE, new Cache(this));
- caches.put(Short.TYPE, new Cache(this));
- caches.put(Integer.TYPE, new Cache(this));
- caches.put(Float.TYPE, new Cache(this));
- caches.put(Long.TYPE, new Cache(this));
- caches.put(Double.TYPE, new Cache(this));
- caches.put(DocTermsIndex.class, new Cache(this));
- caches.put(DocTerms.class, new Cache(this));
- }
-
public synchronized void purgeAllCaches() {
- init();
+ SlowMultiReaderWrapper.purgeAllCaches();
}
public synchronized void purge(IndexReader r) {
- for(Cache c : caches.values()) {
- c.purge(r);
+ new SlowMultiReaderWrapper(r).getFieldCache().purgeCache();
- }
+ }
- }
-
+
public synchronized CacheEntry[] getCacheEntries() {
- List<CacheEntry> result = new ArrayList<CacheEntry>(17);
- for(final Map.Entry<Class<?>,Cache> cacheEntry: caches.entrySet()) {
- final Cache<?> cache = cacheEntry.getValue();
- final Class<?> cacheType = cacheEntry.getKey();
- synchronized(cache.readerCache) {
- for( Object readerKey : cache.readerCache.keySet() ) {
- Map<Entry<?>, Object> innerCache = cache.readerCache.get(readerKey);
- for (final Map.Entry<Entry<?>, Object> mapEntry : innerCache.entrySet()) {
- Entry entry = (Entry)mapEntry.getKey();
- result.add(new CacheEntryImpl(readerKey, entry.field,
- cacheType, entry.creator,
- mapEntry.getValue()));
+ return SlowMultiReaderWrapper.getCacheEntries();
- }
+ }
- }
- }
- }
- return result.toArray(new CacheEntry[result.size()]);
- }
-
+
- private static final class CacheEntryImpl extends CacheEntry {
- private final Object readerKey;
- private final String fieldName;
- private final Class<?> cacheType;
- private final EntryCreator custom;
- private final Object value;
- CacheEntryImpl(Object readerKey, String fieldName,
- Class<?> cacheType,
- EntryCreator custom,
- Object value) {
- this.readerKey = readerKey;
- this.fieldName = fieldName;
- this.cacheType = cacheType;
- this.custom = custom;
- this.value = value;
-
- // :HACK: for testing.
-// if (null != locale || SortField.CUSTOM != sortFieldType) {
-// throw new RuntimeException("Locale/sortFieldType: " + this);
-// }
-
- }
- @Override
- public Object getReaderKey() { return readerKey; }
- @Override
- public String getFieldName() { return fieldName; }
- @Override
- public Class<?> getCacheType() { return cacheType; }
- @Override
- public Object getCustom() { return custom; }
- @Override
- public Object getValue() { return value; }
- }
-
- final static IndexReader.ReaderFinishedListener purgeReader = new IndexReader.ReaderFinishedListener() {
- @Override
- public void finished(IndexReader reader) {
- FieldCache.DEFAULT.purge(reader);
- }
- };
-
- /** Expert: Internal cache. */
- final static class Cache<T> {
- Cache() {
- this.wrapper = null;
- }
-
- Cache(FieldCache wrapper) {
- this.wrapper = wrapper;
- }
-
- final FieldCache wrapper;
-
- final Map<Object,Map<Entry<?>,Object>> readerCache = new WeakHashMap<Object,Map<Entry<?>,Object>>();
-
- protected Object createValue(IndexReader reader, Entry entryKey) throws IOException {
- return entryKey.creator.create( reader );
- }
-
- /** Remove this reader from the cache, if present. */
- public void purge(IndexReader r) {
- Object readerKey = r.getCoreCacheKey();
- synchronized(readerCache) {
- readerCache.remove(readerKey);
- }
- }
-
- @SuppressWarnings("unchecked")
- public Object get(IndexReader reader, Entry key) throws IOException {
- Map<Entry<?>,Object> innerCache;
- Object value;
- final Object readerKey = reader.getCoreCacheKey();
- synchronized (readerCache) {
- innerCache = readerCache.get(readerKey);
- if (innerCache == null) {
- // First time this reader is using FieldCache
- innerCache = new HashMap<Entry<?>,Object>();
- readerCache.put(readerKey, innerCache);
- reader.addReaderFinishedListener(purgeReader);
- value = null;
- } else {
- value = innerCache.get(key);
- }
- if (value == null) {
- value = new CreationPlaceholder();
- innerCache.put(key, value);
- }
- }
- if (value instanceof CreationPlaceholder) {
- synchronized (value) {
- CreationPlaceholder progress = (CreationPlaceholder) value;
- if (progress.value == null) {
- progress.value = createValue(reader, key);
- synchronized (readerCache) {
- innerCache.put(key, progress.value);
- }
-
- // Only check if key.custom (the parser) is
- // non-null; else, we check twice for a single
- // call to FieldCache.getXXX
- if (key.creator != null && wrapper != null) {
- final PrintStream infoStream = wrapper.getInfoStream();
- if (infoStream != null) {
- printNewInsanity(infoStream, progress.value);
- }
- }
- }
- return progress.value;
- }
- }
-
- // Validate new entries
- if( key.creator.shouldValidate() ) {
- key.creator.validate( (T)value, reader);
- }
- return value;
- }
-
- private void printNewInsanity(PrintStream infoStream, Object value) {
- final FieldCacheSanityChecker.Insanity[] insanities = FieldCacheSanityChecker.checkSanity(wrapper);
- for(int i=0;i<insanities.length;i++) {
- final FieldCacheSanityChecker.Insanity insanity = insanities[i];
- final CacheEntry[] entries = insanity.getCacheEntries();
- for(int j=0;j<entries.length;j++) {
- if (entries[j].getValue() == value) {
- // OK this insanity involves our entry
- infoStream.println("WARNING: new FieldCache insanity created\nDetails: " + insanity.toString());
- infoStream.println("\nStack:\n");
- new Throwable().printStackTrace(infoStream);
- break;
- }
- }
- }
- }
- }
-
- /** Expert: Every composite-key in the internal cache is of this type. */
- static class Entry<T> {
- final String field; // which Fieldable
- final EntryCreator<T> creator; // which custom comparator or parser
-
- /** Creates one of these objects for a custom comparator/parser. */
- Entry (String field, EntryCreator<T> custom) {
- this.field = field;
- this.creator = custom;
- }
-
- /** Two of these are equal iff they reference the same field and type. */
- @Override
- public boolean equals (Object o) {
- if (o instanceof Entry) {
- Entry other = (Entry) o;
- if (other.field.equals(field)) {
- if (other.creator == null) {
- if (creator == null) return true;
- } else if (other.creator.equals (creator)) {
- return true;
- }
- }
- }
- return false;
- }
-
- /** Composes a hashcode based on the field and type. */
- @Override
- public int hashCode() {
- return field.hashCode() ^ (creator==null ? 0 : creator.hashCode());
- }
- }
-
// inherit javadocs
- public byte[] getBytes (IndexReader reader, String field) throws IOException {
+ public byte[] getBytes(IndexReader reader, String field) throws IOException {
- return getBytes(reader, field, new ByteValuesCreator(field, null)).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getBytes(field);
}
// inherit javadocs
public byte[] getBytes(IndexReader reader, String field, ByteParser parser) throws IOException {
- return getBytes(reader, field, new ByteValuesCreator(field, parser)).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getBytes(field, new ByteValuesCreator(field, parser)).values;
}
@SuppressWarnings("unchecked")
- public ByteValues getBytes(IndexReader reader, String field, EntryCreator creator ) throws IOException
- {
- return (ByteValues)caches.get(Byte.TYPE).get(reader, new Entry(field, creator));
+ public ByteValues getBytes(IndexReader reader, String field, EntryCreator creator ) throws IOException {
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getBytes(field, creator);
}
// inherit javadocs
public short[] getShorts (IndexReader reader, String field) throws IOException {
- return getShorts(reader, field, new ShortValuesCreator(field,null)).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getShorts(field);
}
// inherit javadocs
public short[] getShorts(IndexReader reader, String field, ShortParser parser) throws IOException {
- return getShorts(reader, field, new ShortValuesCreator(field,parser)).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getShorts(field, parser);
}
@SuppressWarnings("unchecked")
- public ShortValues getShorts(IndexReader reader, String field, EntryCreator creator ) throws IOException
- {
- return (ShortValues)caches.get(Short.TYPE).get(reader, new Entry(field, creator));
+ public ShortValues getShorts(IndexReader reader, String field, EntryCreator creator ) throws IOException {
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getShorts(field, creator);
}
// inherit javadocs
public int[] getInts (IndexReader reader, String field) throws IOException {
- return getInts(reader, field, new IntValuesCreator( field, null )).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getInts(field);
}
// inherit javadocs
public int[] getInts(IndexReader reader, String field, IntParser parser) throws IOException {
- return getInts(reader, field, new IntValuesCreator( field, parser )).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getInts(field, parser);
}
@SuppressWarnings("unchecked")
public IntValues getInts(IndexReader reader, String field, EntryCreator creator ) throws IOException {
- return (IntValues)caches.get(Integer.TYPE).get(reader, new Entry(field, creator));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getInts(field, creator);
}
// inherit javadocs
public float[] getFloats (IndexReader reader, String field) throws IOException {
- return getFloats(reader, field, new FloatValuesCreator( field, null ) ).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getFloats(field);
}
// inherit javadocs
public float[] getFloats(IndexReader reader, String field, FloatParser parser) throws IOException {
- return getFloats(reader, field, new FloatValuesCreator( field, parser ) ).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getFloats(field, parser);
}
@SuppressWarnings("unchecked")
public FloatValues getFloats(IndexReader reader, String field, EntryCreator creator ) throws IOException {
- return (FloatValues)caches.get(Float.TYPE).get(reader, new Entry(field, creator));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getFloats(field, creator);
}
public long[] getLongs(IndexReader reader, String field) throws IOException {
- return getLongs(reader, field, new LongValuesCreator( field, null ) ).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getLongs(field);
}
// inherit javadocs
public long[] getLongs(IndexReader reader, String field, FieldCache.LongParser parser) throws IOException {
- return getLongs(reader, field, new LongValuesCreator( field, parser ) ).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getLongs(field, parser);
}
@SuppressWarnings("unchecked")
public LongValues getLongs(IndexReader reader, String field, EntryCreator creator ) throws IOException {
- return (LongValues)caches.get(Long.TYPE).get(reader, new Entry(field, creator));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getLongs(field, creator);
}
// inherit javadocs
public double[] getDoubles(IndexReader reader, String field) throws IOException {
- return getDoubles(reader, field, new DoubleValuesCreator( field, null ) ).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getDoubles(field);
}
// inherit javadocs
public double[] getDoubles(IndexReader reader, String field, FieldCache.DoubleParser parser) throws IOException {
- return getDoubles(reader, field, new DoubleValuesCreator( field, parser ) ).values;
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getDoubles(field, parser);
}
@SuppressWarnings("unchecked")
public DoubleValues getDoubles(IndexReader reader, String field, EntryCreator creator ) throws IOException {
- return (DoubleValues)caches.get(Double.TYPE).get(reader, new Entry(field, creator));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getDoubles(field, creator);
}
- public DocTermsIndex getTermsIndex(IndexReader reader, String field) throws IOException {
+ public DocTermsIndex getTermsIndex(IndexReader reader, String field) throws IOException {
- return getTermsIndex(reader, field, new DocTermsIndexCreator(field));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getTermsIndex(field);
}
- public DocTermsIndex getTermsIndex(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
+ public DocTermsIndex getTermsIndex(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
- return getTermsIndex(reader, field, new DocTermsIndexCreator(field,
- fasterButMoreRAM ? DocTermsIndexCreator.FASTER_BUT_MORE_RAM : 0));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getTermsIndex(field, fasterButMoreRAM);
}
- @SuppressWarnings("unchecked")
- public DocTermsIndex getTermsIndex(IndexReader reader, String field, EntryCreator creator) throws IOException {
- return (DocTermsIndex)caches.get(DocTermsIndex.class).get(reader, new Entry(field, creator));
- }
-
// TODO: this if DocTermsIndex was already created, we
// should share it...
public DocTerms getTerms(IndexReader reader, String field) throws IOException {
- return getTerms(reader, field, new DocTermsCreator(field));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getTerms(field);
}
public DocTerms getTerms(IndexReader reader, String field, boolean fasterButMoreRAM) throws IOException {
- return getTerms(reader, field, new DocTermsCreator(field,
- fasterButMoreRAM ? DocTermsCreator.FASTER_BUT_MORE_RAM : 0));
+ return new SlowMultiReaderWrapper(reader).getFieldCache().getTerms(field, fasterButMoreRAM);
}
- @SuppressWarnings("unchecked")
- public DocTerms getTerms(IndexReader reader, String field, EntryCreator creator) throws IOException {
- return (DocTerms)caches.get(DocTerms.class).get(reader, new Entry(field, creator));
- }
-
- private volatile PrintStream infoStream;
-
public void setInfoStream(PrintStream stream) {
- infoStream = stream;
+ SlowMultiReaderWrapper.setInfoStream(stream);
}
public PrintStream getInfoStream() {
- return infoStream;
+ return SlowMultiReaderWrapper.getInfoStream();
}
}
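
The hunk above turns the deprecated FieldCacheImpl into a thin shim: every call builds a throwaway SlowMultiReaderWrapper and goes through its per-reader cache, which is keyed on the wrapped reader. Under that assumption the legacy static API and the new API should resolve to the same entries. A small sketch, not part of the patch; the "price" field is hypothetical:

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.SlowMultiReaderWrapper;
import org.apache.lucene.search.FieldCache;

import java.io.IOException;

public class DelegationSketch {
  // Both calls are intended to hit the same reader-keyed cache entry, because the
  // deprecated FieldCacheImpl now forwards every lookup through the wrapper.
  static void compare(IndexReader reader) throws IOException {
    int[] viaLegacyApi = FieldCache.DEFAULT.getInts(reader, "price"); // hypothetical field
    int[] viaWrapper = new SlowMultiReaderWrapper(reader).getFieldCache().getInts("price");
    System.out.println("same cached array: " + (viaLegacyApi == viaWrapper));
  }
}
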
Index: lucene/src/java/org/apache/lucene/search/cache/AtomicFieldCacheImpl.java
===================================================================
--- lucene/src/java/org/apache/lucene/search/cache/AtomicFieldCacheImpl.java (revision )
+++ lucene/src/java/org/apache/lucene/search/cache/AtomicFieldCacheImpl.java (revision )
@@ -0,0 +1,363 @@
+package org.apache.lucene.search.cache;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.FieldCache;
+import org.apache.lucene.util.FieldCacheSanityChecker;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ *
+ */
+public class AtomicFieldCacheImpl implements AtomicFieldCache {
+
+ private final IndexReader indexReader;
+ private final Map<Class<?>, Cache> cache;
+
+ public AtomicFieldCacheImpl(IndexReader indexReader) {
+ if (indexReader == null) {
+ throw new IllegalArgumentException("Supplied indexReader cannot be null");
+ }
+
+ this.indexReader = indexReader;
+ cache = new HashMap<Class<?>, Cache>(7);
+ initCache();
+ }
+
+ private void initCache() {
+ cache.put(Byte.TYPE, new Cache(this, indexReader));
+ cache.put(Short.TYPE, new Cache(this, indexReader));
+ cache.put(Integer.TYPE, new Cache(this, indexReader));
+ cache.put(Float.TYPE, new Cache(this, indexReader));
+ cache.put(Long.TYPE, new Cache(this, indexReader));
+ cache.put(Double.TYPE, new Cache(this, indexReader));
+ cache.put(FieldCache.DocTermsIndex.class, new Cache(this, indexReader));
+ cache.put(FieldCache.DocTerms.class, new Cache(this, indexReader));
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public byte[] getBytes(String field) throws IOException {
+ return getBytes(field, new ByteValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public byte[] getBytes(String field, FieldCache.ByteParser parser) throws IOException {
+ return getBytes(field, new ByteValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.ByteValues getBytes(String field, EntryCreator creator) throws IOException {
+ return (CachedArray.ByteValues) cache.get(Byte.TYPE).get(new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public short[] getShorts(String field) throws IOException {
+ return getShorts(field, new ShortValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public short[] getShorts(String field, FieldCache.ShortParser parser) throws IOException {
+ return getShorts(field, new ShortValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.ShortValues getShorts(String field, EntryCreator creator) throws IOException {
+ return (CachedArray.ShortValues) cache.get(Short.TYPE).get(new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public int[] getInts(String field) throws IOException {
+ return getInts(field, new IntValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public int[] getInts(String field, FieldCache.IntParser parser) throws IOException {
+ return getInts(field, new IntValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.IntValues getInts(String field, EntryCreator creator) throws IOException {
+ return (CachedArray.IntValues) cache.get(Integer.TYPE).get(new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public float[] getFloats(String field) throws IOException {
+ return getFloats(field, new FloatValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public float[] getFloats(String field, FieldCache.FloatParser parser) throws IOException {
+ return getFloats(field, new FloatValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.FloatValues getFloats(String field, EntryCreator creator) throws IOException {
+ return (CachedArray.FloatValues) cache.get(Float.TYPE).get(new Entry(field, creator));
+ }
+
+ public long[] getLongs(String field) throws IOException {
+ return getLongs(field, new LongValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public long[] getLongs(String field, FieldCache.LongParser parser) throws IOException {
+ return getLongs(field, new LongValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.LongValues getLongs(String field, EntryCreator creator) throws IOException {
+ return (CachedArray.LongValues) cache.get(Long.TYPE).get(new Entry(field, creator));
+ }
+
+ // inherit javadocs
+ public double[] getDoubles(String field) throws IOException {
+ return getDoubles(field, new DoubleValuesCreator(field, null)).values;
+ }
+
+ // inherit javadocs
+ public double[] getDoubles(String field, FieldCache.DoubleParser parser) throws IOException {
+ return getDoubles(field, new DoubleValuesCreator(field, parser)).values;
+ }
+
+ @SuppressWarnings("unchecked")
+ public CachedArray.DoubleValues getDoubles(String field, EntryCreator creator) throws IOException {
+ return (CachedArray.DoubleValues) cache.get(Double.TYPE).get(new Entry(field, creator));
+ }
+
+ public FieldCache.DocTermsIndex getTermsIndex(String field) throws IOException {
+ return getTermsIndex(field, new DocTermsIndexCreator(field));
+ }
+
+ public FieldCache.DocTermsIndex getTermsIndex(String field, boolean fasterButMoreRAM) throws IOException {
+ return getTermsIndex(field, new DocTermsIndexCreator(field,
+ fasterButMoreRAM ? DocTermsIndexCreator.FASTER_BUT_MORE_RAM : 0));
+ }
+
+ @SuppressWarnings("unchecked")
+ public FieldCache.DocTermsIndex getTermsIndex(String field, EntryCreator creator) throws IOException {
+ return (FieldCache.DocTermsIndex) cache.get(FieldCache.DocTermsIndex.class).get(new Entry(field, creator));
+ }
+
+ // TODO: this if DocTermsIndex was already created, we
+ // should share it...
+ public FieldCache.DocTerms getTerms(String field) throws IOException {
+ return getTerms(field, new DocTermsCreator(field));
+ }
+
+ public FieldCache.DocTerms getTerms(String field, boolean fasterButMoreRAM) throws IOException {
+ return getTerms(field, new DocTermsCreator(field,
+ fasterButMoreRAM ? DocTermsCreator.FASTER_BUT_MORE_RAM : 0));
+ }
+
+ @SuppressWarnings("unchecked")
+ public FieldCache.DocTerms getTerms(String field, EntryCreator creator) throws IOException {
+ return (FieldCache.DocTerms) cache.get(FieldCache.DocTerms.class).get(new Entry(field, creator));
+ }
+
+ private volatile PrintStream infoStream;
+
+ public void setInfoStream(PrintStream stream) {
+ infoStream = stream;
+ }
+
+ public PrintStream getInfoStream() {
+ return infoStream;
+ }
+
+ public FieldCache.CacheEntry[] getCacheEntries() {
+ List<FieldCache.CacheEntry> result = new ArrayList<FieldCache.CacheEntry>(17);
+ for (final Map.Entry<Class<?>, Cache> cacheEntry : cache.entrySet()) {
+ final Class<?> cacheType = cacheEntry.getKey();
+ final Cache<?> cache = cacheEntry.getValue();
+ synchronized (cache.readerCache) {
+ for (final Map.Entry<Entry<?>, Object> mapEntry : cache.readerCache.entrySet()) {
+ Entry entry = (Entry) mapEntry.getKey();
+ result.add(new CacheEntryImpl(indexReader, entry.field, cacheType, entry.creator, mapEntry.getValue()));
+ }
+ }
+ }
+ return result.toArray(new FieldCache.CacheEntry[result.size()]);
+ }
+
+ public void purgeCache() {
+ cache.clear();
+ initCache();
+ }
+
+
+ private static class Cache<T> {
+
+ private final AtomicFieldCache wrapper;
+ private final IndexReader indexReader;
+ private final Map<Entry<?>,Object> readerCache;
+
+ Cache(AtomicFieldCache wrapper, IndexReader indexReader) {
+ this.wrapper = wrapper;
+ this.indexReader = indexReader;
+ this.readerCache = new HashMap<Entry<?>,Object>();
+ }
+
+ protected Object createValue(IndexReader reader, Entry entryKey) throws IOException {
+ return entryKey.creator.create(reader);
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object get(Entry key) throws IOException {
+ Object value;
+
+ synchronized (readerCache) {
+ value = readerCache.get(key);
+ if (value == null) {
+ value = new FieldCache.CreationPlaceholder();
+ readerCache.put(key, value);
+ }
+ }
+ if (value instanceof FieldCache.CreationPlaceholder) {
+ synchronized (value) {
+ FieldCache.CreationPlaceholder progress = (FieldCache.CreationPlaceholder) value;
+ if (progress.value != null) {
+ return progress.value;
+ }
+ progress.value = createValue(indexReader, key);
+ synchronized (readerCache) {
+ readerCache.put(key, progress.value);
+ }
+
+ // Only check if key.custom (the parser) is
+ // non-null; else, we check twice for a single
+ // call to FieldCache.getXXX
+ if (key.creator != null && wrapper != null) {
+ final PrintStream infoStream = wrapper.getInfoStream();
+ if (infoStream != null) {
+ printNewInsanity(infoStream, progress.value);
+ }
+ }
+ }
+ }
+
+ // Validate new entries
+ if( key.creator.shouldValidate() ) {
+ key.creator.validate( (T)value, indexReader);
+ }
+ return value;
+ }
+
+ private void printNewInsanity(PrintStream infoStream, Object value) {
+ final FieldCacheSanityChecker.Insanity[] insanities = FieldCacheSanityChecker.checkSanity(wrapper.getCacheEntries());
+ for(int i=0;i<insanities.length;i++) {
+ final FieldCacheSanityChecker.Insanity insanity = insanities[i];
+ final FieldCache.CacheEntry[] entries = insanity.getCacheEntries();
+ for(int j=0;j<entries.length;j++) {
+ if (entries[j].getValue() == value) {
+ // OK this insanity involves our entry
+ infoStream.println("WARNING: new FieldCache insanity created\nDetails: " + insanity.toString());
+ infoStream.println("\nStack:\n");
+ new Throwable().printStackTrace(infoStream);
+ break;
+ }
+ }
+ }
+ }
+ }
+
+ private static class Entry<T> {
+
+ private final String field; // which Fieldable
+ private final EntryCreator<T> creator; // which custom comparator or parser
+
+ /** Creates one of these objects for a custom comparator/parser. */
+ Entry (String field, EntryCreator<T> custom) {
+ this.field = field;
+ this.creator = custom;
+ }
+
+ /** Two of these are equal iff they reference the same field and type. */
+ @Override
+ public boolean equals (Object o) {
+ if (o instanceof Entry) {
+ Entry other = (Entry) o;
+ if (other.field.equals(field)) {
+ if (other.creator == null) {
+ if (creator == null) return true;
+ } else if (other.creator.equals (creator)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ /** Composes a hashcode based on the field and type. */
+ @Override
+ public int hashCode() {
+ return field.hashCode() ^ (creator==null ? 0 : creator.hashCode());
+ }
+ }
+
+ private static class CacheEntryImpl extends FieldCache.CacheEntry {
+
+ private final IndexReader indexReader;
+ private final String fieldName;
+ private final Class<?> cacheType;
+ private final EntryCreator custom;
+ private final Object value;
+
+ CacheEntryImpl(IndexReader indexReader,
+ String fieldName,
+ Class<?> cacheType,
+ EntryCreator custom,
+ Object value) {
+ this.indexReader = indexReader;
+ this.fieldName = fieldName;
+ this.cacheType = cacheType;
+ this.custom = custom;
+ this.value = value;
+ }
+
+ public Object getReaderKey() {
+ return indexReader;
+ }
+
+ public String getFieldName() {
+ return fieldName;
+ }
+
+ public Class<?> getCacheType() {
+ return cacheType;
+ }
+
+ public Object getCustom() {
+ return custom;
+ }
+
+ public Object getValue() {
+ return value;
+ }
+ }
+
+}
\ No newline at end of file
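
A short sketch of the new AtomicFieldCacheImpl in isolation, not part of the patch: the cache is bound to a single reader at construction time, so entries are keyed only by field and creator, and purging simply clears this one instance. The "timestamp" field is a hypothetical placeholder.

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.cache.AtomicFieldCacheImpl;

import java.io.IOException;

public class AtomicCacheSketch {
  static void demo(IndexReader atomicReader) throws IOException {
    // One cache instance per reader: no reader key appears in the internal map,
    // because the reader is fixed when the cache is constructed.
    AtomicFieldCacheImpl cache = new AtomicFieldCacheImpl(atomicReader);
    long[] timestamps = cache.getLongs("timestamp"); // hypothetical field
    System.out.println("loaded " + timestamps.length + " values");
    cache.purgeCache(); // drops every entry held for this one reader
  }
}
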
Index: lucene/src/java/org/apache/lucene/index/IndexReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/IndexReader.java (revision 1145594)
+++ lucene/src/java/org/apache/lucene/index/IndexReader.java (revision )
@@ -25,6 +25,7 @@
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.index.values.IndexDocValues;
+import org.apache.lucene.search.cache.AtomicFieldCache;
import org.apache.lucene.store.*;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
@@ -1582,7 +1583,11 @@
public int getTermInfosIndexDivisor() {
throw new UnsupportedOperationException("This reader does not support this method.");
}
-
+
+ public AtomicFieldCache getFieldCache() {
+ throw new UnsupportedOperationException("This reader does not support this method.");
+ }
+
public final IndexDocValues docValues(String field) throws IOException {
final PerDocValues perDoc = perDocValues();
if (perDoc == null) {
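
In the base class the new getFieldCache() deliberately throws UnsupportedOperationException, mirroring the existing "please use MultiFields.XXX instead" pattern; within this patch only SegmentReader and SlowMultiReaderWrapper override it. A hedged sketch of the fallback a caller might use (FieldCacheLookup and cacheFor are illustrative names, not part of the patch):

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.SlowMultiReaderWrapper;
import org.apache.lucene.search.cache.AtomicFieldCache;

public class FieldCacheLookup {
  // Prefer the reader's own per-atomic cache; fall back to wrapping when the
  // reader is composite and does not override getFieldCache().
  static AtomicFieldCache cacheFor(IndexReader reader) {
    try {
      return reader.getFieldCache();
    } catch (UnsupportedOperationException e) {
      return new SlowMultiReaderWrapper(reader).getFieldCache();
    }
  }
}
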
Index: lucene/src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/SegmentReader.java (revision 1148938)
+++ lucene/src/java/org/apache/lucene/index/SegmentReader.java (revision )
@@ -17,29 +17,21 @@
* limitations under the License.
*/
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.index.codecs.PerDocValues;
+import org.apache.lucene.search.cache.AtomicFieldCacheImpl;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.util.BitVector;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CloseableThreadLocal;
-import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.*;
+import java.io.IOException;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
/**
* @lucene.experimental
*/
@@ -72,8 +64,34 @@
SegmentCoreReaders core;
+ private final AtomicFieldCacheImpl segmentCache;
+
+ public SegmentReader() {
+ this.segmentCache = new AtomicFieldCacheImpl(this);
+ this.readerFinishedListeners = new MapBackedSet<ReaderFinishedListener>(new ConcurrentHashMap<ReaderFinishedListener,Boolean>());
+ readerFinishedListeners.add(new ReaderFinishedListener() {
+
+ public void finished(IndexReader reader) {
+ segmentCache.purgeCache();
+ }
+
+ });
+ }
+
+ public SegmentReader(AtomicFieldCacheImpl segmentCache) {
+ this.segmentCache = segmentCache;
+ this.readerFinishedListeners = new MapBackedSet<ReaderFinishedListener>(new ConcurrentHashMap<ReaderFinishedListener,Boolean>());
+ readerFinishedListeners.add(new ReaderFinishedListener() {
+
+ public void finished(IndexReader reader) {
+ SegmentReader.this.segmentCache.purgeCache();
+ }
+
+ });
+ }
+
/**
- * Sets the initial value
+ * Sets the initial value
*/
private class FieldsReaderLocal extends CloseableThreadLocal<FieldsReader> {
@Override
@@ -83,7 +101,7 @@
}
Map<String,SegmentNorms> norms = new HashMap<String,SegmentNorms>();
-
+
/**
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
@@ -103,7 +121,7 @@
int termInfosIndexDivisor,
IOContext context)
throws CorruptIndexException, IOException {
-
+
SegmentReader instance = new SegmentReader();
instance.readOnly = readOnly;
instance.si = si;
@@ -150,7 +168,7 @@
// Verify # deletes does not exceed maxDoc for this
// segment:
- assert si.getDelCount() <= maxDoc() :
+ assert si.getDelCount() <= maxDoc() :
"delete count mismatch: " + recomputedCount + ") exceeds max doc (" + maxDoc() + ") for segment " + si.name;
return true;
@@ -168,7 +186,7 @@
} else
assert si.getDelCount() == 0;
}
-
+
/**
* Clones the norm bytes. May be overridden by subclasses. New and experimental.
* @param bytes Byte array to clone
@@ -179,7 +197,7 @@
System.arraycopy(bytes, 0, cloneBytes, 0, bytes.length);
return cloneBytes;
}
-
+
/**
* Clones the deleteDocs BitVector. May be overridden by subclasses. New and experimental.
* @param bv BitVector to clone
@@ -216,10 +234,10 @@
}
synchronized SegmentReader reopenSegment(SegmentInfo si, boolean doClone, boolean openReadOnly) throws CorruptIndexException, IOException {
- boolean deletionsUpToDate = (this.si.hasDeletions() == si.hasDeletions())
+ boolean deletionsUpToDate = (this.si.hasDeletions() == si.hasDeletions())
&& (!si.hasDeletions() || this.si.getDelFileName().equals(si.getDelFileName()));
boolean normsUpToDate = true;
-
+
Set<Integer> fieldNormsChanged = new HashSet<Integer>();
for (FieldInfo fi : core.fieldInfos) {
int fieldNumber = fi.number;
@@ -233,14 +251,14 @@
// also if both old and new readers aren't readonly, we clone to avoid sharing modifications
if (normsUpToDate && deletionsUpToDate && !doClone && openReadOnly && readOnly) {
return this;
- }
+ }
// When cloning, the incoming SegmentInfos should not
// have any changes in it:
assert !doClone || (normsUpToDate && deletionsUpToDate);
// clone reader
- SegmentReader clone = new SegmentReader();
+ final SegmentReader clone = deletionsUpToDate ? new SegmentReader(segmentCache) : new SegmentReader();
boolean success = false;
try {
@@ -258,7 +276,7 @@
clone.hasChanges = hasChanges;
hasChanges = false;
}
-
+
if (doClone) {
if (liveDocs != null) {
liveDocsRef.incrementAndGet();
@@ -302,7 +320,7 @@
clone.decRef();
}
}
-
+
return clone;
}
@@ -374,7 +392,7 @@
protected void doClose() throws IOException {
termVectorsLocal.close();
fieldsReaderLocal.close();
-
+
if (liveDocs != null) {
liveDocsRef.decrementAndGet();
// null so if an app hangs on to us we still free most ram
@@ -450,7 +468,7 @@
List<String> files() throws IOException {
return new ArrayList<String>(si.files());
}
-
+
FieldInfos fieldInfos() {
return core.fieldInfos;
}
@@ -559,7 +577,7 @@
final SegmentNorms norm = norms.get(field);
if (norm == null) {
// not indexed, or norms not stored
- return null;
+ return null;
}
return norm.bytes();
}
@@ -592,7 +610,7 @@
if (!si.hasSeparateNorms(fi.number)) {
d = cfsDir;
}
-
+
// singleNormFile means multiple norms share this file
boolean singleNormFile = IndexFileNames.matchesExtension(fileName, IndexFileNames.NORMS_EXTENSION);
IndexInput normInput = null;
@@ -617,7 +635,7 @@
// if the size is exactly equal to maxDoc to detect a headerless file.
// NOTE: remove this check in Lucene 5.0!
String version = si.getVersion();
- final boolean isUnversioned =
+ final boolean isUnversioned =
(version == null || StringHelper.getVersionComparator().compare(version, "3.2") < 0)
&& normInput.length() == maxDoc();
if (isUnversioned) {
@@ -676,7 +694,7 @@
TermVectorsReader getTermVectorsReaderOrig() {
return core.getTermVectorsReaderOrig();
}
-
+
/** Return a term frequency vector for the specified document and field. The
* vector returned contains term numbers and frequencies for all terms in
* the specified field of this document, if the field had storeTermVector
@@ -688,13 +706,13 @@
// Check if this field is invalid or has no stored term vector
ensureOpen();
FieldInfo fi = core.fieldInfos.fieldInfo(field);
- if (fi == null || !fi.storeTermVector)
+ if (fi == null || !fi.storeTermVector)
return null;
-
+
TermVectorsReader termVectorsReader = getTermVectorsReader();
if (termVectorsReader == null)
return null;
-
+
return termVectorsReader.get(docNumber, field);
}
@@ -737,14 +755,14 @@
@Override
public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
ensureOpen();
-
+
TermVectorsReader termVectorsReader = getTermVectorsReader();
if (termVectorsReader == null)
return null;
-
+
return termVectorsReader.get(docNumber);
}
-
+
/** {@inheritDoc} */
@Override
public String toString() {
@@ -755,7 +773,7 @@
buffer.append(si.toString(core.dir, pendingDeleteCount));
return buffer.toString();
}
-
+
@Override
public ReaderContext getTopReaderContext() {
return readerContext;
@@ -767,7 +785,7 @@
public String getSegmentName() {
return core.segment;
}
-
+
/**
* Return the SegmentInfo of the segment this reader is reading.
*/
@@ -824,6 +842,11 @@
}
@Override
+ public org.apache.lucene.search.cache.AtomicFieldCache getFieldCache() {
+ return segmentCache;
+ }
+
+ @Override
protected void readerFinished() {
// Do nothing here -- we have more careful control on
// when to notify that a SegmentReader has finished,
@@ -832,7 +855,7 @@
// longer used (all SegmentReaders sharing it have been
// closed).
}
-
+
@Override
public PerDocValues perDocValues() throws IOException {
return core.perDocProducer;
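
For illustration only, not part of the patch: with SegmentReader owning an AtomicFieldCacheImpl and purging it through a ReaderFinishedListener, field values can be loaded and evicted per segment instead of through one global static cache. A sketch that walks the sub-readers of a composite reader; the "price" field is hypothetical.

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;

import java.io.IOException;

public class PerSegmentSketch {
  static void walkSegments(Directory dir) throws IOException {
    IndexReader top = IndexReader.open(dir);
    try {
      // Each SegmentReader carries its own AtomicFieldCacheImpl, so values are
      // cached per segment and dropped when that segment reader is finished.
      for (IndexReader segment : top.getSequentialSubReaders()) {
        int[] values = segment.getFieldCache().getInts("price"); // hypothetical field
        System.out.println(segment + ": " + values.length + " docs");
      }
    } finally {
      top.close();
    }
  }
}
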
Index: lucene/src/java/org/apache/lucene/search/cache/AtomicFieldCache.java
===================================================================
--- lucene/src/java/org/apache/lucene/search/cache/AtomicFieldCache.java (revision )
+++ lucene/src/java/org/apache/lucene/search/cache/AtomicFieldCache.java (revision )
@@ -0,0 +1,305 @@
+package org.apache.lucene.search.cache;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.PrintStream;
+
+import static org.apache.lucene.search.FieldCache.*;
+
+/**
+ * Expert: Maintains caches of term values.
+ */
+public interface AtomicFieldCache {
+
+ /** Checks the internal cache for an appropriate entry, and if none is
+ * found, reads the terms in field as a single byte and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ * @param field Which field contains the single byte values.
+ * @return The values in the given field for each document.
+ * @throws java.io.IOException If any error occurs.
+ */
+ public byte[] getBytes (String field) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as bytes and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ * @param field Which field contains the bytes.
+ * @param parser Computes byte for string values.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public byte[] getBytes (String field, ByteParser parser) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as bytes and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ * @param field Which field contains the bytes.
+ * @param creator Used to make the ByteValues
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public CachedArray.ByteValues getBytes(String field, EntryCreator creator) throws IOException;
+
+
+ /** Checks the internal cache for an appropriate entry, and if none is
+ * found, reads the terms in field as shorts and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ * @param field Which field contains the shorts.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public short[] getShorts(String field) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as shorts and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ * @param field Which field contains the shorts.
+ * @param parser Computes short for string values.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public short[] getShorts(String field, ShortParser parser) throws IOException;
+
+
+ /** Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as shorts and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ * @param field Which field contains the shorts.
+ * @param creator Used to make the ShortValues.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public CachedArray.ShortValues getShorts(String field, EntryCreator creator) throws IOException;
+
+
+ /** Checks the internal cache for an appropriate entry, and if none is
+ * found, reads the terms in field as integers and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ * @param field Which field contains the integers.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public int[] getInts(String field) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as integers and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ * @param field Which field contains the integers.
+ * @param parser Computes integer for string values.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public int[] getInts(String field, IntParser parser) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as integers and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ * @param field Which field contains the integers.
+ * @param creator Used to make the IntValues.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public CachedArray.IntValues getInts(String field, EntryCreator creator) throws IOException;
+
+
+ /** Checks the internal cache for an appropriate entry, and if
+ * none is found, reads the terms in field as floats and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ * @param field Which field contains the floats.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public float[] getFloats(String field) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if
+ * none is found, reads the terms in field as floats and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ * @param field Which field contains the floats.
+ * @param parser Computes float for string values.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public float[] getFloats(String field, FloatParser parser) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if
+ * none is found, reads the terms in field as floats and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ * @param field Which field contains the floats.
+ * @param creator Used to make the FloatValues.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public CachedArray.FloatValues getFloats(String field, EntryCreator creator) throws IOException;
+
+
+ /**
+ * Checks the internal cache for an appropriate entry, and if none is
+ * found, reads the terms in field as longs and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ *
+ * @param field Which field contains the longs.
+ * @return The values in the given field for each document.
+ * @throws java.io.IOException If any error occurs.
+ */
+ public long[] getLongs(String field) throws IOException;
+
+ /**
+ * Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as longs and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ *
+ * @param field Which field contains the longs.
+ * @param parser Computes long for string values.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public long[] getLongs(String field, LongParser parser) throws IOException;
+
+ /**
+ * Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as longs and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ *
+ * @param field Which field contains the longs.
+ * @param creator Used to make the LongValues.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public CachedArray.LongValues getLongs(String field, EntryCreator creator) throws IOException;
+
+
+ /**
+ * Checks the internal cache for an appropriate entry, and if none is
+ * found, reads the terms in field as doubles and returns an array
+ * of size reader.maxDoc() of the value each document
+ * has in the given field.
+ *
+ * @param field Which field contains the doubles.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public double[] getDoubles(String field) throws IOException;
+
+ /**
+ * Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as doubles and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ *
+ * @param field Which field contains the doubles.
+ * @param parser Computes double for string values.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public double[] getDoubles(String field, DoubleParser parser) throws IOException;
+
+ /**
+ * Checks the internal cache for an appropriate entry, and if none is found,
+ * reads the terms in field as doubles and returns an array of
+ * size reader.maxDoc() of the value each document has in the
+ * given field.
+ *
+ * @param field Which field contains the doubles.
+ * @param creator Used to make the DoubleValues.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public CachedArray.DoubleValues getDoubles(String field, EntryCreator creator) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none
+ * is found, reads the term values in field
+ * and returns a {@link DocTerms} instance, providing a
+ * method to retrieve the term (as a BytesRef) per document.
+ * @param field Which field contains the strings.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public DocTerms getTerms(String field) throws IOException;
+
+ /** Expert: just like {@link #getTerms(String)},
+ * but you can specify whether more RAM should be consumed in exchange for
+ * faster lookups (default is "true"). Note that the
+ * first call for a given reader and field "wins",
+ * subsequent calls will share the same cache entry. */
+ public DocTerms getTerms(String field, boolean fasterButMoreRAM) throws IOException;
+
+ /** Checks the internal cache for an appropriate entry, and if none
+ * is found, reads the term values in field
+ * and returns a {@link DocTermsIndex} instance, providing
+ * methods to retrieve sort ordinals and terms (as a BytesRef) per document.
+ * @param field Which field contains the strings.
+ * @return The values in the given field for each document.
+ * @throws IOException If any error occurs.
+ */
+ public DocTermsIndex getTermsIndex(String field) throws IOException;
+
+
+ /** Expert: just like {@link
+ * #getTermsIndex(String)}, but you can specify
+ * whether more RAM should be consumed in exchange for
+ * faster lookups (default is "true"). Note that the
+ * first call for a given reader and field "wins",
+ * subsequent calls will share the same cache entry. */
+ public DocTermsIndex getTermsIndex (String field, boolean fasterButMoreRAM) throws IOException;
+
+ /**
+ * EXPERT: Generates an array of CacheEntry objects representing all items
+ * currently in the FieldCache.
+ *
+ * NOTE: These CacheEntry objects maintain a strong reference to the
+ * cached values. Holding a reference to a CacheEntry after the IndexReader
+ * associated with it has been garbage collected will prevent the value itself
+ * from being garbage collected when the cache drops the WeakReference.
+ *
+ * @lucene.experimental
+ */
+ public CacheEntry[] getCacheEntries();
+
+ /**
+ * Expert: drops all cache entries associated with this
+ * field cache.
+ */
+ public void purgeCache();
+
+ /**
+ * If non-null, the cache implementation will warn whenever
+ * entries are created that are not sane according to
+ * {@link org.apache.lucene.util.FieldCacheSanityChecker}.
+ */
+ public void setInfoStream(PrintStream stream);
+
+ /** counterpart of {@link #setInfoStream(PrintStream)} */
+ public PrintStream getInfoStream();
+
+}
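
As a quick illustration of the interface above (a sketch, not part of the patch): "leaf" stands for any atomic reader exposing getFieldCache() per this change, the "timestamp" and "title" fields are hypothetical, and DocTermsIndex/CacheEntry are the existing inner types of org.apache.lucene.search.FieldCache that the interface pulls in via its static import.

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.cache.AtomicFieldCache;

public class AtomicFieldCacheSketch {
  // "leaf" is a hypothetical atomic reader (e.g. a SegmentReader) exposing
  // getFieldCache() as this patch proposes.
  static void inspect(IndexReader leaf) throws IOException {
    AtomicFieldCache cache = leaf.getFieldCache();
    long[] times = cache.getLongs("timestamp");               // sized to leaf.maxDoc()
    FieldCache.DocTermsIndex titles =
        cache.getTermsIndex("title", true);                   // trade RAM for faster lookups
    int firstOrd = titles.getOrd(0);                          // sort ordinal of doc 0's title
    System.out.println("doc 0 title ord=" + firstOrd + ", " + times.length + " longs cached");
    for (FieldCache.CacheEntry e : cache.getCacheEntries()) { // entries held for this reader only
      System.out.println(e);
    }
    cache.purgeCache();                                       // drop every entry for this reader
  }
}

Per the javadoc above, purgeCache() only touches this reader's entries, unlike purging the deprecated global FieldCache.DEFAULT.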
Index: lucene/src/java/org/apache/lucene/search/FieldCache.java
===================================================================
--- lucene/src/java/org/apache/lucene/search/FieldCache.java (revision 1068526)
+++ lucene/src/java/org/apache/lucene/search/FieldCache.java (revision )
@@ -41,10 +41,11 @@
* @since lucene 1.4
* @see org.apache.lucene.util.FieldCacheSanityChecker
*/
+@Deprecated
public interface FieldCache {
public static final class CreationPlaceholder {
- Object value;
+ public Object value;
}
/**
@@ -112,6 +113,7 @@
}
/** Expert: The cache used internally by sorting and range query classes. */
+ @Deprecated
public static FieldCache DEFAULT = new FieldCacheImpl();
/** The default parser for byte values, which are encoded by {@link Byte#toString(byte)} */