Index: src/test/org/apache/lucene/index/TestDocumentWriter.java
===================================================================
--- src/test/org/apache/lucene/index/TestDocumentWriter.java	(revision 792244)
+++ src/test/org/apache/lucene/index/TestDocumentWriter.java	(working copy)
@@ -97,8 +97,8 @@
 
     // test that the norms are not present in the segment if
     // omitNorms is true
-    for (int i = 0; i < reader.fieldInfos.size(); i++) {
-      FieldInfo fi = reader.fieldInfos.fieldInfo(i);
+    for (int i = 0; i < reader.core.fieldInfos.size(); i++) {
+      FieldInfo fi = reader.core.fieldInfos.fieldInfo(i);
       if (fi.isIndexed) {
         assertTrue(fi.omitNorms == !reader.hasNorms(fi.name));
       }
Index: src/test/org/apache/lucene/index/TestIndexWriterReader.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterReader.java	(revision 792244)
+++ src/test/org/apache/lucene/index/TestIndexWriterReader.java	(working copy)
@@ -42,7 +42,7 @@
 public class TestIndexWriterReader extends LuceneTestCase {
   static PrintStream infoStream;
 
-  private static class HeavyAtomicInt {
+  public static class HeavyAtomicInt {
     private int value;
     public HeavyAtomicInt(int start) {
       value = start;
Index: src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
===================================================================
--- src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java	(revision 0)
+++ src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java	(revision 0)
@@ -0,0 +1,111 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Random;
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestNRTReaderWithThreads extends LuceneTestCase {
+  Random random = new Random();
+  HeavyAtomicInt seq = new HeavyAtomicInt(1);
+
+  public void testIndexing() throws Exception {
+    Directory mainDir = new MockRAMDirectory();
+    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(),
+        IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(false);
+    IndexReader reader = writer.getReader(); // start pooling readers
+    reader.close();
+    writer.setMergeFactor(2);
+    writer.setMaxBufferedDocs(10);
+    RunThread[] indexThreads = new RunThread[4];
+    for (int x=0; x < indexThreads.length; x++) {
+      indexThreads[x] = new RunThread(x % 2, writer);
+      indexThreads[x].setName("Thread " + x);
+      indexThreads[x].start();
+    }
+    long startTime = System.currentTimeMillis();
+    long duration = 5*1000;
+    while ((System.currentTimeMillis() - startTime) < duration) {
+      Thread.sleep(100);
+    }
+    int delCount = 0;
+    int addCount = 0;
+    for (int x=0; x < indexThreads.length; x++) {
+      indexThreads[x].run = false;
+      assertTrue(indexThreads[x].ex == null);
+      addCount += indexThreads[x].addCount;
+      delCount += indexThreads[x].delCount;
+    }
+    for (int x=0; x < indexThreads.length; x++) {
+      indexThreads[x].join();
+    }
+    //System.out.println("addCount:"+addCount);
+    //System.out.println("delCount:"+delCount);
+    writer.close();
+    mainDir.close();
+  }
+
+  public class RunThread extends Thread {
+    IndexWriter writer;
+    boolean run = true;
+    Throwable ex;
+    int delCount = 0;
+    int addCount = 0;
+    int type;
+
+    public RunThread(int type, IndexWriter writer) {
+      this.type = type;
+      this.writer = writer;
+    }
+
+    public void run() {
+      try {
+        while (run) {
+          //int n = random.nextInt(2);
+          if (type == 0) {
+            int i = seq.addAndGet(1);
+            Document doc = TestIndexWriterReader.createDocument(i, "index1", 10);
+            writer.addDocument(doc);
+            addCount++;
+          } else if (type == 1) {
+            // we may or may not delete because the term may not exist,
+            // however we're opening and closing the reader rapidly
+            IndexReader reader = writer.getReader();
+            int id = random.nextInt(seq.intValue());
+            Term term = new Term("id", Integer.toString(id));
+            int count = TestIndexWriterReader.count(term, reader);
+            writer.deleteDocuments(term);
+            reader.close();
+            delCount += count;
+          }
+        }
+      } catch (Throwable ex) {
+        ex.printStackTrace(System.out);
+        this.ex = ex;
+        run = false;
+      }
+    }
+  }
+}

Property changes on: src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
___________________________________________________________________
Added: svn:eol-style
   + native

Index: src/test/org/apache/lucene/index/TestLazyProxSkipping.java
===================================================================
--- src/test/org/apache/lucene/index/TestLazyProxSkipping.java	(revision 792244)
+++ src/test/org/apache/lucene/index/TestLazyProxSkipping.java	(working copy)
@@ -43,12 +43,24 @@
     private String term1 = "xx";
     private String term2 = "yy";
     private String term3 = "zz";
+
+    private class SeekCountingDirectory extends RAMDirectory {
+        public IndexInput openInput(String name) throws IOException {
+            IndexInput ii = super.openInput(name);
+            if (name.endsWith(".prx")) {
+                // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
+                ii = new SeeksCountingStream(ii);
+            }
+            return ii;
+        }
+    }
 
     private void createIndex(int numHits) throws IOException {
         int numDocs = 500;
-        Directory directory = new RAMDirectory();
+        Directory directory = new SeekCountingDirectory();
         IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+        writer.setUseCompoundFile(false);
         writer.setMaxBufferedDocs(10);
         for (int i = 0; i < numDocs; i++) {
             Document doc = new Document();
@@ -74,9 +86,6 @@
 
         SegmentReader reader = SegmentReader.getOnlySegmentReader(directory);
 
-        // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
-        reader.proxStream = new SeeksCountingStream(reader.proxStream);
-
         this.searcher = new IndexSearcher(reader);
     }
 
@@ -96,6 +105,7 @@
         assertEquals(numHits, hits.length);
 
         // check if the number of calls of seek() does not exceed the number of hits
+        assertTrue(this.seeksCounter > 0);
         assertTrue(this.seeksCounter <= numHits + 1);
     }
 
Index: src/java/org/apache/lucene/index/SegmentTermPositions.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentTermPositions.java	(revision 792244)
+++ src/java/org/apache/lucene/index/SegmentTermPositions.java	(working copy)
@@ -146,7 +146,7 @@
   private void lazySkip() throws IOException {
     if (proxStream == null) {
       // clone lazily
-      proxStream = (IndexInput)parent.proxStream.clone();
+      proxStream = (IndexInput) parent.core.proxStream.clone();
     }
 
     // we might have to skip the current payload
Index: src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentReader.java	(revision 792244)
+++ src/java/org/apache/lucene/index/SegmentReader.java	(working copy)
@@ -40,18 +40,12 @@
 /** @version $Id */
 class SegmentReader extends IndexReader implements Cloneable {
-  protected Directory directory;
   protected boolean readOnly;
-  private String segment;
   private SegmentInfo si;
   private int readBufferSize;
 
-  FieldInfos fieldInfos;
-  private FieldsReader fieldsReaderOrig = null;
   CloseableThreadLocal fieldsReaderLocal = new FieldsReaderLocal();
-  TermInfosReader tis;
-  TermVectorsReader termVectorsReaderOrig = null;
   CloseableThreadLocal termVectorsLocal = new CloseableThreadLocal();
   BitVector deletedDocs = null;
@@ -64,31 +58,197 @@
   private boolean rollbackDeletedDocsDirty = false;
   private boolean rollbackNormsDirty = false;
   private int rollbackPendingDeleteCount;
-  IndexInput freqStream;
-  IndexInput proxStream;
 
   // optionally used for the .nrm file shared by multiple norms
   private IndexInput singleNormStream;
   private Ref singleNormRef;
 
-  // Counts how many other reader share the core objects
-  // (freqStream, proxStream, tis, etc.) of this reader;
-  // when coreRef drops to 0, these core objects may be
-  // closed.  A given insance of SegmentReader may be
-  // closed, even those it shares core objects with other
-  // SegmentReaders:
-  private Ref coreRef = new Ref();
+  CoreReaders core;
 
-  // Compound File Reader when based on a compound file segment
-  CompoundFileReader cfsReader = null;
-  CompoundFileReader storeCFSReader = null;
+  // Holds core readers that are shared (unchanged) when
+  // SegmentReader is cloned or reopened
+  static final class CoreReaders {
+
+    // Counts how many other reader share the core objects
+    // (freqStream, proxStream, tis, etc.) of this reader;
+    // when coreRef drops to 0, these core objects may be
+    // closed.  A given insance of SegmentReader may be
+    // closed, even those it shares core objects with other
+    // SegmentReaders:
+    private final Ref ref = new Ref();
+
+    final String segment;
+    final FieldInfos fieldInfos;
+    final IndexInput freqStream;
+    final IndexInput proxStream;
+
+    final Directory dir;
+    final Directory cfsDir;
+    final int readBufferSize;
+
+    TermInfosReader tis;
+    FieldsReader fieldsReaderOrig;
+    TermVectorsReader termVectorsReaderOrig;
+    CompoundFileReader cfsReader;
+    CompoundFileReader storeCFSReader;
+
+    CoreReaders(Directory dir, SegmentInfo si, int readBufferSize) throws IOException {
+      segment = si.name;
+      this.readBufferSize = readBufferSize;
+      this.dir = dir;
+
+      boolean success = false;
+
+      try {
+        Directory dir0 = dir;
+        if (si.getUseCompoundFile()) {
+          cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
+          dir0 = cfsReader;
+        }
+        cfsDir = dir0;
+
+        fieldInfos = new FieldInfos(cfsDir, segment + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
+
+        tis = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize);
+
+        boolean anyProx = false;
+        final int numFields = fieldInfos.size();
+        for(int i=0;!anyProx && i
 0);
         assertTrue(this.seeksCounter <= numHits + 1);
     }
Index: tags/lucene_2_4_back_compat_tests_20090704/src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- tags/lucene_2_4_back_compat_tests_20090704/src/java/org/apache/lucene/index/SegmentReader.java	(revision 792498)
+++ tags/lucene_2_4_back_compat_tests_20090704/src/java/org/apache/lucene/index/SegmentReader.java	(working copy)
@@ -78,7 +78,27 @@
   // indicates the SegmentReader with which the resources are being shared,
   // in case this is a re-opened reader
   private SegmentReader referencedSegmentReader = null;
-  
+
+  // stub
+  static final class CoreReaders {
+    // Counts how many other reader share the core objects
+    // (freqStream, proxStream, tis, etc.) of this reader;
+    // when coreRef drops to 0, these core objects may be
+    // closed.  A given insance of SegmentReader may be
+    // closed, even those it shares core objects with other
+    // SegmentReaders:
+    FieldsReader fieldsReaderOrig;
+    TermVectorsReader termVectorsReaderOrig;
+    CompoundFileReader cfsReader;
+    CompoundFileReader storeCFSReader;
+    TermInfosReader tis;
+    IndexInput freqStream;
+    IndexInput proxStream;
+    FieldInfos fieldInfos;
+  }
+
+  CoreReaders core;
+
   private class Norm {
     volatile int refCount;
     boolean useSingleNormStream;