Index: src/test/org/apache/lucene/index/TestIndexWriterReader.java =================================================================== --- src/test/org/apache/lucene/index/TestIndexWriterReader.java (revision 792244) +++ src/test/org/apache/lucene/index/TestIndexWriterReader.java (working copy) @@ -42,7 +42,7 @@ public class TestIndexWriterReader extends LuceneTestCase { static PrintStream infoStream; - private static class HeavyAtomicInt { + public static class HeavyAtomicInt { private int value; public HeavyAtomicInt(int start) { value = start; Index: src/test/org/apache/lucene/index/TestLazyProxSkipping.java =================================================================== --- src/test/org/apache/lucene/index/TestLazyProxSkipping.java (revision 792244) +++ src/test/org/apache/lucene/index/TestLazyProxSkipping.java (working copy) @@ -75,7 +75,7 @@ SegmentReader reader = SegmentReader.getOnlySegmentReader(directory); // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek() - reader.proxStream = new SeeksCountingStream(reader.proxStream); + reader.core.proxStream = new SeeksCountingStream(reader.core.proxStream); this.searcher = new IndexSearcher(reader); } Index: src/java/org/apache/lucene/index/SegmentTermPositions.java =================================================================== --- src/java/org/apache/lucene/index/SegmentTermPositions.java (revision 792244) +++ src/java/org/apache/lucene/index/SegmentTermPositions.java (working copy) @@ -146,7 +146,7 @@ private void lazySkip() throws IOException { if (proxStream == null) { // clone lazily - proxStream = (IndexInput)parent.proxStream.clone(); + proxStream = (IndexInput) parent.core.proxStream.clone(); } // we might have to skip the current payload Index: src/java/org/apache/lucene/index/SegmentReader.java =================================================================== --- src/java/org/apache/lucene/index/SegmentReader.java (revision 792244) +++ 
src/java/org/apache/lucene/index/SegmentReader.java (working copy) @@ -48,10 +48,7 @@ private int readBufferSize; FieldInfos fieldInfos; - private FieldsReader fieldsReaderOrig = null; CloseableThreadLocal fieldsReaderLocal = new FieldsReaderLocal(); - TermInfosReader tis; - TermVectorsReader termVectorsReaderOrig = null; CloseableThreadLocal termVectorsLocal = new CloseableThreadLocal(); BitVector deletedDocs = null; @@ -64,31 +61,39 @@ private boolean rollbackDeletedDocsDirty = false; private boolean rollbackNormsDirty = false; private int rollbackPendingDeleteCount; - IndexInput freqStream; - IndexInput proxStream; // optionally used for the .nrm file shared by multiple norms private IndexInput singleNormStream; private Ref singleNormRef; - // Counts how many other reader share the core objects - // (freqStream, proxStream, tis, etc.) of this reader; - // when coreRef drops to 0, these core objects may be - // closed. A given insance of SegmentReader may be - // closed, even those it shares core objects with other - // SegmentReaders: - private Ref coreRef = new Ref(); + // Holds core readers that are shared when SegmentReader + // is cloned + static final class CoreReaders { + // Counts how many other readers share the core objects + // (freqStream, proxStream, tis, etc.) of this reader; + // when coreRef drops to 0, these core objects may be + // closed. 
A given instance of SegmentReader may be + // closed, even though it shares core objects with other + // SegmentReaders: + Ref ref = new Ref(); - // Compound File Reader when based on a compound file segment - CompoundFileReader cfsReader = null; - CompoundFileReader storeCFSReader = null; - + FieldsReader fieldsReaderOrig; + TermVectorsReader termVectorsReaderOrig; + CompoundFileReader cfsReader; + CompoundFileReader storeCFSReader; + TermInfosReader tis; + IndexInput freqStream; + IndexInput proxStream; + } + + CoreReaders core; + /** * Sets the initial value */ private class FieldsReaderLocal extends CloseableThreadLocal { protected Object initialValue() { - return (FieldsReader) fieldsReaderOrig.clone(); + return (FieldsReader) core.fieldsReaderOrig.clone(); } } @@ -426,6 +431,7 @@ instance.segment = si.name; instance.si = si; instance.readBufferSize = readBufferSize; + instance.core = new CoreReaders(); boolean success = false; @@ -433,8 +439,8 @@ // Use compound file directory for some files, if it exists Directory cfsDir = instance.directory(); if (si.getUseCompoundFile()) { - instance.cfsReader = new CompoundFileReader(instance.directory(), instance.segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize); - cfsDir = instance.cfsReader; + instance.core.cfsReader = new CompoundFileReader(instance.directory(), instance.segment + "." 
+ IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize); + cfsDir = instance.core.cfsReader; } instance.fieldInfos = new FieldInfos(cfsDir, instance.segment + ".fnm"); @@ -449,15 +455,15 @@ if (!instance.fieldInfos.fieldInfo(i).omitTermFreqAndPositions) anyProx = true; - instance.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize); + instance.core.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize); instance.loadDeletedDocs(); // make sure that all index files have been read or are kept open // so that if an index update removes them we'll still have them - instance.freqStream = cfsDir.openInput(instance.segment + ".frq", readBufferSize); + instance.core.freqStream = cfsDir.openInput(instance.segment + ".frq", readBufferSize); if (anyProx) - instance.proxStream = cfsDir.openInput(instance.segment + ".prx", readBufferSize); + instance.core.proxStream = cfsDir.openInput(instance.segment + ".prx", readBufferSize); instance.openNorms(cfsDir, readBufferSize); success = true; @@ -475,26 +481,15 @@ return instance; } - synchronized void openDocStores(SegmentReader orig) throws IOException { - if (fieldsReaderOrig == null) { - orig.openDocStores(); - - fieldsReaderOrig = orig.fieldsReaderOrig; - termVectorsReaderOrig = orig.termVectorsReaderOrig; - storeCFSReader = orig.storeCFSReader; - cfsReader = orig.cfsReader; - } - } - synchronized void openDocStores() throws IOException { - if (fieldsReaderOrig == null) { + if (core.fieldsReaderOrig == null) { final Directory storeDir; if (si.getDocStoreOffset() != -1) { if (si.getDocStoreIsCompoundFile()) { - storeCFSReader = new CompoundFileReader(directory(), - si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, - readBufferSize); - storeDir = storeCFSReader; + core.storeCFSReader = new CompoundFileReader(directory(), + si.getDocStoreSegment() + "." 
+ IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, + readBufferSize); + storeDir = core.storeCFSReader; assert storeDir != null; } else { storeDir = directory(); @@ -504,10 +499,10 @@ // In some cases, we were originally opened when CFS // was not used, but then we are asked to open doc // stores after the segment has switched to CFS - if (cfsReader == null) { - cfsReader = new CompoundFileReader(directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize); + if (core.cfsReader == null) { + core.cfsReader = new CompoundFileReader(directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize); } - storeDir = cfsReader; + storeDir = core.cfsReader; assert storeDir != null; } else { storeDir = directory(); @@ -521,16 +516,16 @@ storesSegment = segment; } - fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, - si.getDocStoreOffset(), si.docCount); + core.fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, + si.getDocStoreOffset(), si.docCount); // Verify two sources of "maxDoc" agree: - if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) { - throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount); + if (si.getDocStoreOffset() == -1 && core.fieldsReaderOrig.size() != si.docCount) { + throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + core.fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount); } if (fieldInfos.hasVectors()) { // open term vector files only as needed - termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount); + core.termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount); } } } @@ 
-622,22 +617,15 @@ boolean success = false; try { - coreRef.incRef(); - clone.coreRef = coreRef; + clone.core = core; + clone.core.ref.incRef(); clone.readOnly = openReadOnly; clone.directory = directory; clone.si = si; clone.segment = segment; clone.readBufferSize = readBufferSize; - clone.cfsReader = cfsReader; - clone.storeCFSReader = storeCFSReader; clone.fieldInfos = fieldInfos; - clone.tis = tis; - clone.freqStream = freqStream; - clone.proxStream = proxStream; - clone.termVectorsReaderOrig = termVectorsReaderOrig; - clone.fieldsReaderOrig = fieldsReaderOrig; if (!openReadOnly && hasChanges) { // My pending changes transfer to the new reader @@ -683,7 +671,7 @@ // If we are not cloning, then this will open anew // any norms that have changed: - clone.openNorms(si.getUseCompoundFile() ? cfsReader : directory(), readBufferSize); + clone.openNorms(si.getUseCompoundFile() ? core.cfsReader : directory(), readBufferSize); success = true; } finally { @@ -738,7 +726,7 @@ FieldsReader getFieldsReader() { return (FieldsReader) fieldsReaderLocal.get(); } - + protected void doClose() throws IOException { termVectorsLocal.close(); fieldsReaderLocal.close(); @@ -754,31 +742,31 @@ ((Norm) it.next()).decRef(); } - if (coreRef.decRef() == 0) { + if (core.ref.decRef() == 0) { // close everything, nothing is shared anymore with other readers - if (tis != null) { - tis.close(); + if (core.tis != null) { + core.tis.close(); // null so if an app hangs on to us we still free most ram - tis = null; + core.tis = null; } - if (freqStream != null) - freqStream.close(); - if (proxStream != null) - proxStream.close(); + if (core.freqStream != null) + core.freqStream.close(); + if (core.proxStream != null) + core.proxStream.close(); - if (termVectorsReaderOrig != null) - termVectorsReaderOrig.close(); + if (core.termVectorsReaderOrig != null) + core.termVectorsReaderOrig.close(); - if (fieldsReaderOrig != null) - fieldsReaderOrig.close(); + if (core.fieldsReaderOrig != null) + 
core.fieldsReaderOrig.close(); - if (cfsReader != null) - cfsReader.close(); + if (core.cfsReader != null) + core.cfsReader.close(); - if (storeCFSReader != null) - storeCFSReader.close(); + if (core.storeCFSReader != null) + core.storeCFSReader.close(); } } @@ -841,12 +829,12 @@ public TermEnum terms() { ensureOpen(); - return tis.terms(); + return core.tis.terms(); } public TermEnum terms(Term t) throws IOException { ensureOpen(); - return tis.terms(t); + return core.tis.terms(t); } FieldInfos getFieldInfos() { @@ -882,7 +870,7 @@ public int docFreq(Term t) throws IOException { ensureOpen(); - TermInfo ti = tis.get(t); + TermInfo ti = core.tis.get(t); if (ti != null) return ti.docFreq; else @@ -903,11 +891,11 @@ } public void setTermInfosIndexDivisor(int indexDivisor) throws IllegalStateException { - tis.setIndexDivisor(indexDivisor); + core.tis.setIndexDivisor(indexDivisor); } public int getTermInfosIndexDivisor() { - return tis.getIndexDivisor(); + return core.tis.getIndexDivisor(); } /** @@ -1089,12 +1077,12 @@ * Create a clone from the initial TermVectorsReader and store it in the ThreadLocal. * @return TermVectorsReader */ - private TermVectorsReader getTermVectorsReader() { - assert termVectorsReaderOrig != null; + TermVectorsReader getTermVectorsReader() { + assert core.termVectorsReaderOrig != null; TermVectorsReader tvReader = (TermVectorsReader)termVectorsLocal.get(); if (tvReader == null) { try { - tvReader = (TermVectorsReader)termVectorsReaderOrig.clone(); + tvReader = (TermVectorsReader)core.termVectorsReaderOrig.clone(); } catch (CloneNotSupportedException cnse) { return null; } @@ -1102,6 +1090,10 @@ } return tvReader; } + + TermVectorsReader getTermVectorsReaderOrig() { + return core.termVectorsReaderOrig; + } /** Return a term frequency vector for the specified document and field. 
The * vector returned contains term numbers and frequencies for all terms in @@ -1113,7 +1105,7 @@ // Check if this field is invalid or has no stored term vector ensureOpen(); FieldInfo fi = fieldInfos.fieldInfo(field); - if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null) + if (fi == null || !fi.storeTermVector || core.termVectorsReaderOrig == null) return null; TermVectorsReader termVectorsReader = getTermVectorsReader(); @@ -1127,7 +1119,7 @@ public void getTermFreqVector(int docNumber, String field, TermVectorMapper mapper) throws IOException { ensureOpen(); FieldInfo fi = fieldInfos.fieldInfo(field); - if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null) + if (fi == null || !fi.storeTermVector || core.termVectorsReaderOrig == null) return; TermVectorsReader termVectorsReader = getTermVectorsReader(); @@ -1143,7 +1135,7 @@ public void getTermFreqVector(int docNumber, TermVectorMapper mapper) throws IOException { ensureOpen(); - if (termVectorsReaderOrig == null) + if (core.termVectorsReaderOrig == null) return; TermVectorsReader termVectorsReader = getTermVectorsReader(); @@ -1162,7 +1154,7 @@ */ public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException { ensureOpen(); - if (termVectorsReaderOrig == null) + if (core.termVectorsReaderOrig == null) return null; TermVectorsReader termVectorsReader = getTermVectorsReader(); @@ -1231,11 +1223,11 @@ // share the underlying postings data) will map to the // same entry in the FieldCache. See LUCENE-1579. 
public final Object getFieldCacheKey() { - return freqStream; + return core.freqStream; } public long getUniqueTermCount() { - return tis.size(); + return core.tis.size(); } /** @@ -1261,4 +1253,5 @@ throw new IllegalArgumentException(reader + " is not a SegmentReader or a single-segment DirectoryReader"); } + } Index: src/java/org/apache/lucene/index/SegmentTermDocs.java =================================================================== --- src/java/org/apache/lucene/index/SegmentTermDocs.java (revision 792244) +++ src/java/org/apache/lucene/index/SegmentTermDocs.java (working copy) @@ -45,16 +45,16 @@ protected SegmentTermDocs(SegmentReader parent) { this.parent = parent; - this.freqStream = (IndexInput) parent.freqStream.clone(); + this.freqStream = (IndexInput) parent.core.freqStream.clone(); synchronized (parent) { this.deletedDocs = parent.deletedDocs; } - this.skipInterval = parent.tis.getSkipInterval(); - this.maxSkipLevels = parent.tis.getMaxSkipLevels(); + this.skipInterval = parent.core.tis.getSkipInterval(); + this.maxSkipLevels = parent.core.tis.getMaxSkipLevels(); } public void seek(Term term) throws IOException { - TermInfo ti = parent.tis.get(term); + TermInfo ti = parent.core.tis.get(term); seek(ti, term); } @@ -69,7 +69,7 @@ ti = segmentTermEnum.termInfo(); } else { // punt case term = termEnum.term(); - ti = parent.tis.get(term); + ti = parent.core.tis.get(term); } seek(ti, term); Index: src/java/org/apache/lucene/index/SegmentMerger.java =================================================================== --- src/java/org/apache/lucene/index/SegmentMerger.java (revision 792244) +++ src/java/org/apache/lucene/index/SegmentMerger.java (working copy) @@ -468,7 +468,7 @@ final SegmentReader matchingSegmentReader = matchingSegmentReaders[idx++]; TermVectorsReader matchingVectorsReader = null; if (matchingSegmentReader != null) { - TermVectorsReader vectorsReader = matchingSegmentReader.termVectorsReaderOrig; + TermVectorsReader vectorsReader = 
matchingSegmentReader.getTermVectorsReaderOrig(); // If the TV* files are an older format then they cannot read raw docs: if (vectorsReader != null && vectorsReader.canReadRawDocs()) { Index: src/java/org/apache/lucene/index/IndexWriter.java =================================================================== --- src/java/org/apache/lucene/index/IndexWriter.java (revision 792244) +++ src/java/org/apache/lucene/index/IndexWriter.java (working copy) @@ -4891,7 +4891,7 @@ } for(int i=0;i