Property changes on: .
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk:r889431-889432

Property changes on: CHANGES.txt
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/CHANGES.txt:r889431-889432

Property changes on: src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/src/test/org/apache/lucene/analysis/TestISOLatin1AccentFilter.java:r889431-889432

Property changes on: src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java:r889431-889432

Property changes on: src/test/org/apache/lucene/util/TestAttributeSource.java
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/src/test/org/apache/lucene/util/TestAttributeSource.java:r889431-889432

Property changes on: src/test/org/apache/lucene/document/TestNumberTools.java
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/src/test/org/apache/lucene/document/TestNumberTools.java:r889431-889432

Property changes on: src/test/org/apache/lucene/document/TestDateTools.java
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/src/test/org/apache/lucene/document/TestDateTools.java:r889431-889432

Property changes on: src/java/org/apache/lucene/search/MultiTermQueryWrapperFilter.java
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/src/java/org/apache/lucene/search/MultiTermQueryWrapperFilter.java:r889431-889432

Index: src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentReader.java	(revision 889432)
+++ src/java/org/apache/lucene/index/SegmentReader.java	(working copy)
@@ -931,7 +931,7 @@
   }
 
   /**
-   * @see IndexReader#getFieldNames(IndexReader.FieldOption fldOption)
+   * @see IndexReader#getFieldNames(org.apache.lucene.index.IndexReader.FieldOption)
    */
   @Override
   public Collection getFieldNames(IndexReader.FieldOption fieldOption) {
Index: src/java/org/apache/lucene/index/AllTermDocs.java
===================================================================
--- src/java/org/apache/lucene/index/AllTermDocs.java	(revision 889432)
+++ src/java/org/apache/lucene/index/AllTermDocs.java	(working copy)
@@ -18,69 +18,19 @@
 package org.apache.lucene.index;
 
 import org.apache.lucene.util.BitVector;
-import java.io.IOException;
 
-class AllTermDocs implements TermDocs {
+class AllTermDocs extends AbstractAllTermDocs {
+
   protected BitVector deletedDocs;
-  protected int maxDoc;
-  protected int doc = -1;
 
   protected AllTermDocs(SegmentReader parent) {
+    super(parent.maxDoc());
     synchronized (parent) {
       this.deletedDocs = parent.deletedDocs;
     }
-    this.maxDoc = parent.maxDoc();
   }
 
-  public void seek(Term term) throws IOException {
-    if (term==null) {
-      doc = -1;
-    } else {
-      throw new UnsupportedOperationException();
-    }
+  public boolean isDeleted(int doc) {
+    return deletedDocs != null && deletedDocs.get(doc);
   }
-
-  public void seek(TermEnum termEnum) throws IOException {
-    throw new UnsupportedOperationException();
-  }
-
-  public int doc() {
-    return doc;
-  }
-
-  public int freq() {
-    return 1;
-  }
-
-  public boolean next() throws IOException {
-    return skipTo(doc+1);
-  }
-
-  public int read(int[] docs, int[] freqs) throws IOException {
-    final int length = docs.length;
-    int i = 0;
-    while (i < length && doc < maxDoc) {
-      if (deletedDocs == null || !deletedDocs.get(doc)) {
-        docs[i] = doc;
-        freqs[i] = 1;
-        ++i;
-      }
-      doc++;
-    }
-    return i;
-  }
-
-  public boolean skipTo(int target) throws IOException {
-    doc = target;
-    while (doc < maxDoc) {
-      if (deletedDocs == null || !deletedDocs.get(doc)) {
-        return true;
-      }
-      doc++;
-    }
-    return false;
-  }
-
-  public void close() throws IOException {
-  }
 }

Property changes on: build.xml
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/build.xml:r889431-889432

Property changes on: contrib
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/contrib:r889431-889432

Index: contrib/CHANGES.txt
===================================================================
--- contrib/CHANGES.txt	(revision 889432)
+++ contrib/CHANGES.txt	(working copy)
@@ -2,6 +2,12 @@
 
 ======================= 3.0 branch (not yet released) =======================
 
+Bug fixes
+
+ * LUCENE-2144: Fix InstantiatedIndex to handle termDocs(null)
+   correctly (enumerate all non-deleted docs).  (Karl Wettin via Mike
+   McCandless)
+
 API Changes
 
  * LUCENE-2108: Add SpellChecker.close, to close the underlying

Property changes on: contrib/CHANGES.txt
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/contrib/CHANGES.txt:r889431-889432
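For readers following the refactoring above: the enumeration logic removed from AllTermDocs evidently moves into the shared base class (AbstractAllTermDocs), which is why the only methods AllTermDocs still implements are its constructor and isDeleted(int). InstantiatedIndex has to reproduce the same behaviour for termDocs(null), per the CHANGES entry. A rough, self-contained sketch of that all-documents enumeration, reconstructed from the removed code above rather than copied from the actual base class:

    // Sketch only: mirrors the skipTo/next logic deleted from AllTermDocs above.
    // The real base class in this patch is org.apache.lucene.index.AbstractAllTermDocs;
    // this class name and shape are illustrative.
    abstract class AllDocsEnumSketch {
      protected final int maxDoc;   // one past the largest document number
      protected int doc = -1;       // current document, -1 before the first next()

      protected AllDocsEnumSketch(int maxDoc) {
        this.maxDoc = maxDoc;
      }

      /** Subclasses answer per-document deletions (e.g. from a BitVector). */
      public abstract boolean isDeleted(int doc);

      public int doc() {
        return doc;
      }

      public int freq() {
        return 1; // every live document "matches" exactly once
      }

      public boolean next() {
        return skipTo(doc + 1);
      }

      public boolean skipTo(int target) {
        doc = target;
        while (doc < maxDoc) {
          if (!isDeleted(doc)) {
            return true;
          }
          doc++;
        }
        return false;
      }
    }

The only per-reader piece is isDeleted(int): AllTermDocs answers it from the SegmentReader's deletedDocs BitVector, and the new InstantiatedAllTermDocs presumably answers it from the InstantiatedIndex deletion state introduced further down in this patch.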
Index: contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
===================================================================
--- contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java	(revision 889432)
+++ contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java	(working copy)
@@ -41,8 +41,6 @@
 import org.apache.lucene.index.TermPositions;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.AttributeImpl;
 
 /**
@@ -104,53 +102,144 @@
     }
     instantiatedIndexWriter.close();
 
+    testEqualBehaviour(dir, ii);
-    testTermDocs(dir, ii);
   }
 
-  private void testTermDocs(Directory aprioriIndex, InstantiatedIndex testIndex) throws Exception {
+  private void testTermDocsSomeMore(Directory aprioriIndex, InstantiatedIndex testIndex) throws Exception {
     IndexReader aprioriReader = IndexReader.open(aprioriIndex, false);
     IndexReader testReader = testIndex.indexReaderFactory();
 
-    TermEnum aprioriTermEnum = aprioriReader.terms(new Term("c", "danny"));
+    // test seek
 
-    TermDocs aprioriTermDocs = aprioriReader.termDocs(aprioriTermEnum.term());
-    TermDocs testTermDocs = testReader.termDocs(aprioriTermEnum.term());
+    Term t = new Term("c", "danny");
+    TermEnum aprioriTermEnum = aprioriReader.terms(t);
+    TermEnum testTermEnum = testReader.terms(t);
 
-    assertEquals(aprioriTermDocs.next(), testTermDocs.next());
-    assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    assertEquals(aprioriTermEnum.term(), testTermEnum.term());
 
-    assertEquals(aprioriTermDocs.skipTo(100), testTermDocs.skipTo(100));
-    assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    t = aprioriTermEnum.term();
 
-    assertEquals(aprioriTermDocs.next(), testTermDocs.next());
-    assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    aprioriTermEnum.close();
+    testTermEnum.close();
 
+    TermDocs aprioriTermDocs = aprioriReader.termDocs(t);
+    TermDocs testTermDocs = testReader.termDocs(t);
+    assertEquals(aprioriTermDocs.next(), testTermDocs.next());
+    assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
     assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
 
-    assertEquals(aprioriTermDocs.skipTo(110), testTermDocs.skipTo(110));
-    assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    if (aprioriTermDocs.skipTo(4)) {
+      assertTrue(testTermDocs.skipTo(4));
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.skipTo(4));
+    }
 
-    assertEquals(aprioriTermDocs.skipTo(10), testTermDocs.skipTo(10));
-    assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    if (aprioriTermDocs.next()) {
+      assertTrue(testTermDocs.next());
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.next());
+    }
 
-    assertEquals(aprioriTermDocs.skipTo(210), testTermDocs.skipTo(210));
-    assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    // beyond this point all next and skipto will return false
+
+    if (aprioriTermDocs.skipTo(100)) {
+      assertTrue(testTermDocs.skipTo(100));
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.skipTo(100));
+    }
+
+
+    if (aprioriTermDocs.next()) {
+      assertTrue(testTermDocs.next());
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.next());
+    }
+
+    if (aprioriTermDocs.skipTo(110)) {
+      assertTrue(testTermDocs.skipTo(110));
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.skipTo(110));
+    }
+
+    if (aprioriTermDocs.skipTo(10)) {
+      assertTrue(testTermDocs.skipTo(10));
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.skipTo(10));
+    }
+
+
+    if (aprioriTermDocs.skipTo(210)) {
+      assertTrue(testTermDocs.skipTo(210));
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    } else {
+      assertFalse(testTermDocs.skipTo(210));
+    }
+
     aprioriTermDocs.close();
-    aprioriReader.close();
+    testTermDocs.close();
+
+
+    // test seek null (AllTermDocs)
+    aprioriTermDocs = aprioriReader.termDocs(null);
+    testTermDocs = testReader.termDocs(null);
+
+    while (aprioriTermDocs.next()) {
+      assertTrue(testTermDocs.next());
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    }
+    assertFalse(testTermDocs.next());
+
+
+    aprioriTermDocs.close();
     testTermDocs.close();
+
+
+    // test seek default
+    aprioriTermDocs = aprioriReader.termDocs();
+    testTermDocs = testReader.termDocs();
+    // todo consider seeking and skipping some too
+
+    while (aprioriTermDocs.next()) {
+      assertTrue(testTermDocs.next());
+      assertEquals(aprioriTermDocs.freq(), testTermDocs.freq());
+      assertEquals(aprioriTermDocs.doc(), testTermDocs.doc());
+    }
+    assertFalse(testTermDocs.next());
+
+    aprioriTermDocs.close();
+    testTermDocs.close();
+
+
+    // clean up
+    aprioriReader.close();
     testReader.close();
   }
+
   private void assembleDocument(Document document, int i) {
     document.add(new Field("a", i + " Do you really want to go and live in that house all winter?", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     if (i > 0) {
@@ -213,25 +302,66 @@
    */
   protected void testEqualBehaviour(Directory aprioriIndex, InstantiatedIndex testIndex) throws Exception {
 
+    testEquals(aprioriIndex, testIndex);
 
-    // delete a few documents
-    IndexReader ir = IndexReader.open(aprioriIndex, false);
-    ir.deleteDocument(3);
-    ir.deleteDocument(8);
-    ir.close();
+    // delete a few documents
+    IndexReader air = IndexReader.open(aprioriIndex, false);
+    InstantiatedIndexReader tir = testIndex.indexReaderFactory();
 
-    ir = testIndex.indexReaderFactory();
-    ir.deleteDocument(3);
-    ir.deleteDocument(8);
-    ir.close();
+    assertEquals(air.isCurrent(), tir.isCurrent());
+    assertEquals(air.hasDeletions(), tir.hasDeletions());
+    assertEquals(air.maxDoc(), tir.maxDoc());
+    assertEquals(air.numDocs(), tir.numDocs());
+    assertEquals(air.numDeletedDocs(), tir.numDeletedDocs());
 
+    air.deleteDocument(3);
+    tir.deleteDocument(3);
+
+    assertEquals(air.isCurrent(), tir.isCurrent());
+    assertEquals(air.hasDeletions(), tir.hasDeletions());
+    assertEquals(air.maxDoc(), tir.maxDoc());
+    assertEquals(air.numDocs(), tir.numDocs());
+    assertEquals(air.numDeletedDocs(), tir.numDeletedDocs());
+
+    air.deleteDocument(8);
+    tir.deleteDocument(8);
+
+    assertEquals(air.isCurrent(), tir.isCurrent());
+    assertEquals(air.hasDeletions(), tir.hasDeletions());
+    assertEquals(air.maxDoc(), tir.maxDoc());
+    assertEquals(air.numDocs(), tir.numDocs());
+    assertEquals(air.numDeletedDocs(), tir.numDeletedDocs());
+
+    // this (in 3.0) commits the deletions
+    air.close();
+    tir.close();
+
+    air = IndexReader.open(aprioriIndex, false);
+    tir = testIndex.indexReaderFactory();
+
+    assertEquals(air.isCurrent(), tir.isCurrent());
+    assertEquals(air.hasDeletions(), tir.hasDeletions());
+    assertEquals(air.maxDoc(), tir.maxDoc());
+    assertEquals(air.numDocs(), tir.numDocs());
+    assertEquals(air.numDeletedDocs(), tir.numDeletedDocs());
+
+    for (int d =0; d<air.maxDoc(); d++) {
+      assertEquals(air.isDeleted(d), tir.isDeleted(d));
+    }

Index: contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndexReader.java
===================================================================
--- contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndexReader.java	(revision 889432)
+++ contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndexReader.java	(working copy)
-  private Set<InstantiatedDocument> deletedDocuments = new HashSet<InstantiatedDocument>();
-  private Set<Integer> deletedDocumentNumbers = new HashSet<Integer>();
-  private Map<String,List<NormUpdate>> updatedNormsByFieldNameAndDocumentNumber = null;
+  private BitVector uncommittedDeletedDocuments;
+  private Map<String,List<NormUpdate>> uncommittedNormsByFieldNameAndDocumentNumber = null;
+
   private class NormUpdate {
     private int doc;
     private byte value;
 
@@ -121,7 +114,15 @@
 
   @Override
   public int numDocs() {
-    return getIndex().getDocumentsByNumber().length - index.getDeletedDocuments().size() - deletedDocuments.size();
+    // todo i suppose this value could be cached, but array#length and bitvector#count is fast.
+    int numDocs = getIndex().getDocumentsByNumber().length;
+    if (uncommittedDeletedDocuments != null) {
+      numDocs -= uncommittedDeletedDocuments.count();
+    }
+    if (index.getDeletedDocuments() != null) {
+      numDocs -= index.getDeletedDocuments().count();
+    }
+    return numDocs;
   }
 
@@ -130,28 +131,39 @@
   }
 
   @Override
-  public boolean isDeleted(int n) {
-    return getIndex().getDeletedDocuments().contains(n) || deletedDocumentNumbers.contains(n);
+  public boolean hasDeletions() {
+    return index.getDeletedDocuments() != null || uncommittedDeletedDocuments != null;
   }
+
   @Override
-  public boolean hasDeletions() {
-    return getIndex().getDeletedDocuments().size() > 0 || deletedDocumentNumbers.size() > 0;
+  public boolean isDeleted(int n) {
+    return (index.getDeletedDocuments() != null && index.getDeletedDocuments().get(n))
+        || (uncommittedDeletedDocuments != null && uncommittedDeletedDocuments.get(n));
   }
+
   @Override
   protected void doDelete(int docNum) throws IOException {
-    if (!getIndex().getDeletedDocuments().contains(docNum)) {
-      if (deletedDocumentNumbers.add(docNum)) {
-        deletedDocuments.add(getIndex().getDocumentsByNumber()[docNum]);
-      }
+
+    // dont delete if already deleted
+    if ((index.getDeletedDocuments() != null && index.getDeletedDocuments().get(docNum))
+        || (uncommittedDeletedDocuments != null && uncommittedDeletedDocuments.get(docNum))) {
+      return;
     }
+
+    if (uncommittedDeletedDocuments == null) {
+      uncommittedDeletedDocuments = new BitVector(maxDoc());
+    }
+
+    uncommittedDeletedDocuments.set(docNum);
   }
 
   @Override
   protected void doUndeleteAll() throws IOException {
-    deletedDocumentNumbers.clear();
-    deletedDocuments.clear();
+    // todo: read/write lock
+    uncommittedDeletedDocuments = null;
+    // todo: read/write unlock
   }
 
   @Override
@@ -161,25 +173,30 @@
 
     boolean updated = false;
 
     // 1. update norms
-    if (updatedNormsByFieldNameAndDocumentNumber != null) {
-      for (Map.Entry<String,List<NormUpdate>> e : updatedNormsByFieldNameAndDocumentNumber.entrySet()) {
+    if (uncommittedNormsByFieldNameAndDocumentNumber != null) {
+      for (Map.Entry<String,List<NormUpdate>> e : uncommittedNormsByFieldNameAndDocumentNumber.entrySet()) {
         byte[] norms = getIndex().getNormsByFieldNameAndDocumentNumber().get(e.getKey());
         for (NormUpdate normUpdate : e.getValue()) {
          norms[normUpdate.doc] = normUpdate.value;
        }
      }
-      updatedNormsByFieldNameAndDocumentNumber = null;
+      uncommittedNormsByFieldNameAndDocumentNumber = null;
 
       updated = true;
     }
 
     // 2. remove deleted documents
-    if (deletedDocumentNumbers.size() > 0) {
-      for (Integer doc : deletedDocumentNumbers) {
-        getIndex().getDeletedDocuments().add(doc);
+    if (uncommittedDeletedDocuments != null) {
+      if (index.getDeletedDocuments() == null) {
+        index.setDeletedDocuments(uncommittedDeletedDocuments);
+      } else {
+        for (int d = 0; d< uncommittedDeletedDocuments.size(); d++) {
+          if (uncommittedDeletedDocuments.get(d)) {
+            index.getDeletedDocuments().set(d);
+          }
+        }
       }
-      deletedDocumentNumbers.clear();
-      deletedDocuments.clear();
+      uncommittedDeletedDocuments = null;
 
       updated = true;
 
@@ -299,9 +316,9 @@
     if (norms == null) {
       return new byte[0]; // todo a static final zero length attribute?
     }
-    if (updatedNormsByFieldNameAndDocumentNumber != null) {
+    if (uncommittedNormsByFieldNameAndDocumentNumber != null) {
       norms = norms.clone();
-      List<NormUpdate> updated = updatedNormsByFieldNameAndDocumentNumber.get(field);
+      List<NormUpdate> updated = uncommittedNormsByFieldNameAndDocumentNumber.get(field);
       if (updated != null) {
         for (NormUpdate normUpdate : updated) {
           norms[normUpdate.doc] = normUpdate.value;
         }
       }
@@ -322,13 +339,13 @@
 
   @Override
   protected void doSetNorm(int doc, String field, byte value) throws IOException {
-    if (updatedNormsByFieldNameAndDocumentNumber == null) {
-      updatedNormsByFieldNameAndDocumentNumber = new HashMap<String,List<NormUpdate>>(getIndex().getNormsByFieldNameAndDocumentNumber().size());
+    if (uncommittedNormsByFieldNameAndDocumentNumber == null) {
+      uncommittedNormsByFieldNameAndDocumentNumber = new HashMap<String,List<NormUpdate>>(getIndex().getNormsByFieldNameAndDocumentNumber().size());
     }
-    List<NormUpdate> list = updatedNormsByFieldNameAndDocumentNumber.get(field);
+    List<NormUpdate> list = uncommittedNormsByFieldNameAndDocumentNumber.get(field);
     if (list == null) {
       list = new LinkedList<NormUpdate>();
-      updatedNormsByFieldNameAndDocumentNumber.put(field, list);
+      uncommittedNormsByFieldNameAndDocumentNumber.put(field, list);
     }
     list.add(new NormUpdate(doc, value));
   }
@@ -367,7 +384,19 @@
     return new InstantiatedTermDocs(this);
   }
 
+  @Override
+  public TermDocs termDocs(Term term) throws IOException {
+    if (term == null) {
+      return new InstantiatedAllTermDocs(this);
+    } else {
+      InstantiatedTermDocs termDocs = new InstantiatedTermDocs(this);
+      termDocs.seek(term);
+      return termDocs;
+    }
+  }
+
+  @Override
   public TermPositions termPositions() throws IOException {
     return new InstantiatedTermPositions(this);
   }
@@ -411,7 +440,7 @@
   @Override
   public void getTermFreqVector(int docNumber, TermVectorMapper mapper) throws IOException {
     InstantiatedDocument doc = getIndex().getDocumentsByNumber()[docNumber];
-    for (Map.Entry> e : doc.getVectorSpace().entrySet()) {
+    for (Map.Entry> e : doc.getVectorSpace().entrySet()) {
       mapper.setExpectations(e.getKey(), e.getValue().size(), true, true);
       for (InstantiatedTermDocumentInformation tdi : e.getValue()) {
         mapper.map(tdi.getTerm().text(), tdi.getTermPositions().length, tdi.getTermOffsets(), tdi.getTermPositions());
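The point of the new termDocs(Term) override is that a null term must behave like the core readers' AllTermDocs, which is also what the new CHANGES entry describes. A small hedged usage sketch against the 3.0 IndexReader API; reader here is assumed to be any open IndexReader, whether a SegmentReader-backed one or an InstantiatedIndexReader:

    // Sketch: enumerate every non-deleted document, which is what termDocs(null)
    // is documented to do and what InstantiatedIndexReader now matches.
    TermDocs allDocs = reader.termDocs(null);   // null term selects "all documents"
    try {
      while (allDocs.next()) {
        int docId = allDocs.doc();
        assert !reader.isDeleted(docId);        // deleted docs must be skipped
        assert allDocs.freq() == 1;             // every live doc is reported once
        // ... process docId ...
      }
    } finally {
      allDocs.close();
    }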
Index: contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndexWriter.java
===================================================================
--- contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndexWriter.java	(revision 889432)
+++ contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndexWriter.java	(working copy)
@@ -47,6 +47,7 @@
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.AttributeImpl;
+import org.apache.lucene.util.BitVector;
 
 /**
  * This class, similar to {@link org.apache.lucene.index.IndexWriter}, has no locking mechanism.
@@ -407,6 +408,18 @@
     termDocumentInformationFactoryByDocument.clear();
     fieldNameBuffer.clear();
 
+    // update deleted documents bitset
+    if (index.getDeletedDocuments() != null) {
+      BitVector deletedDocuments = new BitVector(index.getDocumentsByNumber().length);
+      for (int i = 0; i < index.getDeletedDocuments().size(); i++) {
+        if (index.getDeletedDocuments().get(i)) {
+          deletedDocuments.set(i);
+        }
+      }
+      index.setDeletedDocuments(deletedDocuments);
+    }
+
     index.setVersion(System.currentTimeMillis());
 
     // todo unlock

Index: contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndex.java
===================================================================
--- contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndex.java	(revision 889432)
+++ contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedIndex.java	(working copy)
@@ -35,6 +35,7 @@
 import org.apache.lucene.index.TermEnum;
 import org.apache.lucene.index.TermPositionVector;
 import org.apache.lucene.index.TermPositions;
+import org.apache.lucene.util.BitVector;
 
 /**
  * Represented as a coupled graph of class instances, this
@@ -61,8 +62,7 @@
 
   private InstantiatedDocument[] documentsByNumber;
 
-  /** todo: should this be a BitSet? */
-  private Set<Integer> deletedDocuments;
+  private BitVector deletedDocuments;
 
   private Map<String, Map<String, InstantiatedTerm>> termsByFieldAndText;
   private InstantiatedTerm[] orderedTerms;
@@ -85,7 +85,6 @@
     orderedTerms = new InstantiatedTerm[0];
     documentsByNumber = new InstantiatedDocument[0];
     normsByFieldNameAndDocumentNumber = new HashMap<String, byte[]>();
-    deletedDocuments = new HashSet<Integer>();
   }
 
@@ -174,11 +173,14 @@
 
     documentsByNumber = new InstantiatedDocument[sourceIndexReader.maxDoc()];
 
+    if (sourceIndexReader.hasDeletions()) {
+      deletedDocuments = new BitVector(sourceIndexReader.maxDoc());
+    }
 
     // create documents
     for (int i = 0; i < sourceIndexReader.maxDoc(); i++) {
-      if (sourceIndexReader.isDeleted(i)) {
-        deletedDocuments.add(i);
+      if (sourceIndexReader.hasDeletions() && sourceIndexReader.isDeleted(i)) {
+        deletedDocuments.set(i);
       } else {
         InstantiatedDocument document = new InstantiatedDocument();
         // copy stored fields from source reader
@@ -329,10 +331,13 @@
     this.normsByFieldNameAndDocumentNumber = normsByFieldNameAndDocumentNumber;
   }
 
-  public Set<Integer> getDeletedDocuments() {
+  public BitVector getDeletedDocuments() {
     return deletedDocuments;
   }
 
+  void setDeletedDocuments(BitVector deletedDocuments) {
+    this.deletedDocuments = deletedDocuments;
+  }
 
   void setOrderedTerms(InstantiatedTerm[] orderedTerms) {
     this.orderedTerms = orderedTerms;
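The copy loop added to InstantiatedIndexWriter above exists because a BitVector is created with a fixed size: once documentsByNumber has grown, previously recorded deletions have to be copied into a vector sized for the new document count. The same idea extracted into a stand-alone sketch; the helper method and its name are illustrative only, not part of the patch, and it uses only the BitVector calls already exercised by the diff (constructor, size, get, set):

    // import org.apache.lucene.util.BitVector;
    // Rebuild the deletions bitset for a larger maxDoc, carrying old bits over.
    static BitVector growDeletedDocs(BitVector old, int newMaxDoc) {
      BitVector grown = new BitVector(newMaxDoc);
      if (old != null) {
        for (int i = 0; i < old.size(); i++) {  // old.size() is assumed <= newMaxDoc
          if (old.get(i)) {
            grown.set(i);
          }
        }
      }
      return grown;
    }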
Index: contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermDocs.java
===================================================================
--- contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermDocs.java	(revision 889432)
+++ contrib/instantiated/src/java/org/apache/lucene/store/instantiated/InstantiatedTermDocs.java	(working copy)
@@ -60,11 +60,14 @@
     currentDocumentIndex++;
     if (currentDocumentIndex < currentTerm.getAssociatedDocuments().length) {
       currentDocumentInformation = currentTerm.getAssociatedDocuments()[currentDocumentIndex];
-      if (reader.hasDeletions() && reader.isDeleted(currentDocumentInformation.getDocument().getDocumentNumber())) {
+      if (reader.isDeleted(currentDocumentInformation.getDocument().getDocumentNumber())) {
         return next();
       } else {
         return true;
       }
+    } else {
+      // mimic SegmentTermDocs
+      currentDocumentIndex = currentTerm.getAssociatedDocuments().length -1;
     }
     }
     return false;
   }
@@ -111,6 +114,8 @@
     int pos = currentTerm.seekCeilingDocumentInformationIndex(target, startOffset);
 
     if (pos == -1) {
+      // mimic SegmentTermDocs that positions at the last index
+      currentDocumentIndex = currentTerm.getAssociatedDocuments().length -1;
       return false;
     }

Property changes on: contrib/highlighter/src/test
___________________________________________________________________
Modified: svn:mergeinfo
   Merged /lucene/java/trunk/contrib/highlighter/src/test:r889431-889432
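The two "mimic SegmentTermDocs" additions make an exhausted InstantiatedTermDocs behave like the core implementation: once next() or skipTo() has run past the end of a posting list, the enumerator stays positioned at its last entry and keeps answering false instead of misbehaving on later calls. A small hedged illustration of the contract callers can rely on after this patch; reader and term are assumed to be any open IndexReader and a term that is actually present in the index:

    // Sketch: after exhaustion, further skipTo()/next() calls simply return false.
    TermDocs td = reader.termDocs(term);
    while (td.next()) {
      // consume td.doc() / td.freq()
    }
    // The enumerator is now exhausted; both implementations should agree:
    boolean more = td.skipTo(reader.maxDoc());  // target past the last doc: false, no exception
    boolean again = td.next();                  // still false
    td.close();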