Index: lucene/core/src/java/org/apache/lucene/index/CompositeReader.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/index/CompositeReader.java (revision 1372657)
+++ lucene/core/src/java/org/apache/lucene/index/CompositeReader.java (working copy)
@@ -78,13 +78,16 @@
}
/** Expert: returns the sequential sub readers that this
- * reader is logically composed of. It contrast to previous
- * Lucene versions may not return null.
- * If this method returns an empty array, that means this
- * reader is a null reader (for example a MultiReader
- * that has no sub readers).
+ * reader is logically composed of. This method may not
+ * return {@code null}.
+ *
+ *
+ * <p>In contrast to previous Lucene versions this method is no
+ * longer public, code that wants to get all {@link AtomicReader}s
+ * this composite is composed of should use {@link IndexReader#leaves()}.
+ *
+ * @lucene.internal
*/
- public abstract List<? extends IndexReader> getSequentialSubReaders();
+ protected abstract List<? extends IndexReader> getSequentialSubReaders();
@Override
public final CompositeReaderContext getTopReaderContext() {
Index: lucene/core/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java (revision 1372657)
+++ lucene/core/src/java/org/apache/lucene/util/FieldCacheSanityChecker.java (working copy)
@@ -25,8 +25,10 @@
import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldCache.CacheEntry;
+import org.apache.lucene.store.AlreadyClosedException;
/**
* Provides methods for sanity checking that entries in the FieldCache
@@ -278,14 +280,18 @@
List<Object> all = new ArrayList<Object>(17); // will grow as we iter
all.add(seed);
for (int i = 0; i < all.size(); i++) {
- Object obj = all.get(i);
- if (obj instanceof CompositeReader) {
- List<? extends IndexReader> subs = ((CompositeReader)obj).getSequentialSubReaders();
- for (int j = 0; (null != subs) && (j < subs.size()); j++) {
- all.add(subs.get(j).getCoreCacheKey());
+ final Object obj = all.get(i);
+ // nocommit: We don't check closed readers here (as getTopReaderContext
+ // throws AlreadyClosedEx), what should we do? Reflection?
+ try {
+ if (obj instanceof CompositeReader) {
+ for (final IndexReaderContext ctx : ((IndexReader) obj).getTopReaderContext().children()) {
+ all.add(ctx.reader().getCoreCacheKey());
+ }
}
+ } catch (AlreadyClosedException ace) {
+ // ignore this reader
}
-
}
// need to skip the first, because it was the seed
return all.subList(1, all.size());
Index: lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java (working copy)
@@ -23,9 +23,9 @@
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.AtomicReader;
+import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
@@ -50,7 +50,8 @@
writer.commit();
DirectoryReader open = DirectoryReader.open(dir);
- for (AtomicReader indexReader : open.getSequentialSubReaders()) {
+ for (AtomicReaderContext ctx : open.leaves()) {
+ AtomicReader indexReader = ctx.reader();
Terms terms = indexReader.terms("body");
TermsEnum iterator = terms.iterator(null);
IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
@@ -76,8 +77,8 @@
writer.commit();
DirectoryReader open = DirectoryReader.open(dir);
- for (AtomicReader indexReader : open.getSequentialSubReaders()) {
- Terms terms = indexReader.terms("body");
+ for (AtomicReaderContext ctx : open.leaves()) {
+ Terms terms = ctx.reader().terms("body");
TermsEnum iterator = terms.iterator(null);
IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
MatchNoBits bits = new Bits.MatchNoBits(open.maxDoc());
@@ -121,11 +122,11 @@
DirectoryReader firstReader = DirectoryReader.open(dir);
DirectoryReader secondReader = DirectoryReader.open(dir);
- List<? extends AtomicReader> sequentialSubReaders = firstReader.getSequentialSubReaders();
- List<? extends AtomicReader> sequentialSubReaders2 = secondReader.getSequentialSubReaders();
+ List<AtomicReaderContext> leaves = firstReader.leaves();
+ List<AtomicReaderContext> leaves2 = secondReader.leaves();
- for (IndexReader indexReader : sequentialSubReaders) {
- Terms terms = ((AtomicReader) indexReader).terms("body");
+ for (AtomicReaderContext ctx : leaves) {
+ Terms terms = ctx.reader().terms("body");
TermsEnum iterator = terms.iterator(null);
IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
MatchNoBits bits = new Bits.MatchNoBits(firstReader.maxDoc());
@@ -133,7 +134,7 @@
DocsEnum docs = null;
BytesRef term = null;
while ((term = iterator.next()) != null) {
- docs = iterator.docs(null, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
+ docs = iterator.docs(null, randomDocsEnum("body", term, leaves2, bits), random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@@ -142,7 +143,7 @@
enums.clear();
docs = null;
while ((term = iterator.next()) != null) {
- docs = iterator.docs(bits, randomDocsEnum("body", term, sequentialSubReaders2, bits), random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
+ docs = iterator.docs(bits, randomDocsEnum("body", term, leaves2, bits), random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
enums.put(docs, true);
}
assertEquals(terms.size(), enums.size());
@@ -150,11 +151,11 @@
IOUtils.close(writer, firstReader, secondReader, dir);
}
- public DocsEnum randomDocsEnum(String field, BytesRef term, List<? extends AtomicReader> readers, Bits bits) throws IOException {
+ public DocsEnum randomDocsEnum(String field, BytesRef term, List<AtomicReaderContext> readers, Bits bits) throws IOException {
if (random().nextInt(10) == 0) {
return null;
}
- AtomicReader indexReader = (AtomicReader) readers.get(random().nextInt(readers.size()));
+ AtomicReader indexReader = readers.get(random().nextInt(readers.size())).reader();
return indexReader.termDocsEnum(bits, field, term, random().nextBoolean() ? DocsEnum.FLAG_FREQS : 0);
}
Index: lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java (working copy)
@@ -68,7 +68,7 @@
public void onCommit(List<? extends IndexCommit> commits) throws IOException {
IndexCommit lastCommit = commits.get(commits.size()-1);
DirectoryReader r = DirectoryReader.open(dir);
- assertEquals("lastCommit.segmentCount()=" + lastCommit.getSegmentCount() + " vs IndexReader.segmentCount=" + r.getSequentialSubReaders().size(), r.getSequentialSubReaders().size(), lastCommit.getSegmentCount());
+ assertEquals("lastCommit.segmentCount()=" + lastCommit.getSegmentCount() + " vs IndexReader.segmentCount=" + r.leaves().size(), r.leaves().size(), lastCommit.getSegmentCount());
r.close();
verifyCommitOrder(commits);
numOnCommit++;
@@ -318,7 +318,7 @@
final boolean needsMerging;
{
DirectoryReader r = DirectoryReader.open(dir);
- needsMerging = r.getSequentialSubReaders().size() != 1;
+ needsMerging = r.leaves().size() != 1;
r.close();
}
if (needsMerging) {
@@ -435,7 +435,7 @@
DirectoryReader r = DirectoryReader.open(dir);
// Still merged, still 11 docs
- assertEquals(1, r.getSequentialSubReaders().size());
+ assertEquals(1, r.leaves().size());
assertEquals(11, r.numDocs());
r.close();
@@ -451,7 +451,7 @@
r = DirectoryReader.open(dir);
// Not fully merged because we rolled it back, and now only
// 10 docs
- assertTrue(r.getSequentialSubReaders().size() > 1);
+ assertTrue(r.leaves().size() > 1);
assertEquals(10, r.numDocs());
r.close();
@@ -461,7 +461,7 @@
writer.close();
r = DirectoryReader.open(dir);
- assertEquals(1, r.getSequentialSubReaders().size());
+ assertEquals(1, r.leaves().size());
assertEquals(10, r.numDocs());
r.close();
@@ -473,7 +473,7 @@
// Reader still sees fully merged index, because writer
// opened on the prior commit has not yet committed:
r = DirectoryReader.open(dir);
- assertEquals(1, r.getSequentialSubReaders().size());
+ assertEquals(1, r.leaves().size());
assertEquals(10, r.numDocs());
r.close();
@@ -481,7 +481,7 @@
// Now reader sees not-fully-merged index:
r = DirectoryReader.open(dir);
- assertTrue(r.getSequentialSubReaders().size() > 1);
+ assertTrue(r.leaves().size() > 1);
assertEquals(10, r.numDocs());
r.close();
Index: lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestDirectoryReader.java (working copy)
@@ -549,7 +549,7 @@
assertEquals("IndexReaders have different values for numDocs.", index1.numDocs(), index2.numDocs());
assertEquals("IndexReaders have different values for maxDoc.", index1.maxDoc(), index2.maxDoc());
assertEquals("Only one IndexReader has deletions.", index1.hasDeletions(), index2.hasDeletions());
- assertEquals("Single segment test differs.", index1.getSequentialSubReaders().size() == 1, index2.getSequentialSubReaders().size() == 1);
+ assertEquals("Single segment test differs.", index1.leaves().size() == 1, index2.leaves().size() == 1);
// check field names
FieldInfos fieldInfos1 = MultiFields.getMergedFieldInfos(index1);
@@ -785,7 +785,7 @@
DirectoryReader r2 = DirectoryReader.openIfChanged(r);
assertNotNull(r2);
r.close();
- AtomicReader sub0 = r2.getSequentialSubReaders().get(0);
+ AtomicReader sub0 = r2.leaves().get(0).reader();
final int[] ints2 = FieldCache.DEFAULT.getInts(sub0, "number", false);
r2.close();
assertTrue(ints == ints2);
@@ -814,9 +814,8 @@
assertNotNull(r2);
r.close();
- List<? extends AtomicReader> subs = r2.getSequentialSubReaders();
- for(AtomicReader s : subs) {
- assertEquals(36, s.getUniqueTermCount());
+ for(AtomicReaderContext s : r2.leaves()) {
+ assertEquals(36, s.reader().getUniqueTermCount());
}
r2.close();
writer.close();
@@ -842,7 +841,7 @@
// expected
}
- assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders().get(0)).getTermInfosIndexDivisor());
+ assertEquals(-1, ((SegmentReader) r.leaves().get(0).reader()).getTermInfosIndexDivisor());
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
@@ -857,11 +856,11 @@
assertNotNull(r2);
assertNull(DirectoryReader.openIfChanged(r2));
r.close();
- List<? extends AtomicReader> subReaders = r2.getSequentialSubReaders();
- assertEquals(2, subReaders.size());
- for(AtomicReader s : subReaders) {
+ List<AtomicReaderContext> leaves = r2.leaves();
+ assertEquals(2, leaves.size());
+ for(AtomicReaderContext ctx : leaves) {
try {
- s.docFreq(new Term("field", "f"));
+ ctx.reader().docFreq(new Term("field", "f"));
fail("did not hit expected exception");
} catch (IllegalStateException ise) {
// expected
Index: lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java (working copy)
@@ -171,8 +171,8 @@
TestDirectoryReader.assertIndexEquals(index1, index2_refreshed);
index2_refreshed.close();
- assertReaderClosed(index2, true, true);
- assertReaderClosed(index2_refreshed, true, true);
+ assertReaderClosed(index2, true);
+ assertReaderClosed(index2_refreshed, true);
index2 = test.openReader();
@@ -190,30 +190,10 @@
index1.close();
index2.close();
- assertReaderClosed(index1, true, true);
- assertReaderClosed(index2, true, true);
+ assertReaderClosed(index1, true);
+ assertReaderClosed(index2, true);
}
- private void performTestsWithExceptionInReopen(TestReopen test) throws Exception {
- DirectoryReader index1 = test.openReader();
- DirectoryReader index2 = test.openReader();
-
- TestDirectoryReader.assertIndexEquals(index1, index2);
-
- try {
- refreshReader(index1, test, 0, true);
- fail("Expected exception not thrown.");
- } catch (Exception e) {
- // expected exception
- }
-
- // index2 should still be usable and unaffected by the failed reopen() call
- TestDirectoryReader.assertIndexEquals(index1, index2);
-
- index1.close();
- index2.close();
- }
-
public void testThreadSafety() throws Exception {
final Directory dir = newDirectory();
// NOTE: this also controls the number of threads!
@@ -355,11 +335,11 @@
reader.close();
for (final DirectoryReader readerToClose : readersToClose) {
- assertReaderClosed(readerToClose, true, true);
+ assertReaderClosed(readerToClose, true);
}
- assertReaderClosed(reader, true, true);
- assertReaderClosed(firstReader, true, true);
+ assertReaderClosed(reader, true);
+ assertReaderClosed(firstReader, true);
dir.close();
}
@@ -374,7 +354,7 @@
DirectoryReader refreshedReader;
}
- private abstract static class ReaderThreadTask {
+ abstract static class ReaderThreadTask {
protected volatile boolean stopped;
public void stop() {
this.stopped = true;
@@ -384,8 +364,8 @@
}
private static class ReaderThread extends Thread {
- private ReaderThreadTask task;
- private Throwable error;
+ ReaderThreadTask task;
+ Throwable error;
ReaderThread(ReaderThreadTask task) {
@@ -469,9 +449,9 @@
DirectoryReader r = DirectoryReader.open(dir);
if (multiSegment) {
- assertTrue(r.getSequentialSubReaders().size() > 1);
+ assertTrue(r.leaves().size() > 1);
} else {
- assertTrue(r.getSequentialSubReaders().size() == 1);
+ assertTrue(r.leaves().size() == 1);
}
r.close();
}
@@ -533,46 +513,25 @@
}
}
- static void assertReaderClosed(IndexReader reader, boolean checkSubReaders, boolean checkNormsClosed) {
+ static void assertReaderClosed(IndexReader reader, boolean checkSubReaders) {
assertEquals(0, reader.getRefCount());
- if (checkNormsClosed && reader instanceof AtomicReader) {
- // TODO: should we really assert something here? we check for open files and this is obselete...
- // assertTrue(((SegmentReader) reader).normsClosed());
- }
-
if (checkSubReaders && reader instanceof CompositeReader) {
+ // we cannot use reader context here, as reader is
+ // already closed and calling getTopReaderContext() throws AlreadyClosed!
List<? extends IndexReader> subReaders = ((CompositeReader) reader).getSequentialSubReaders();
- for (IndexReader r : subReaders) {
- assertReaderClosed(r, checkSubReaders, checkNormsClosed);
+ for (final IndexReader r : subReaders) {
+ assertReaderClosed(r, checkSubReaders);
}
}
}
- /*
- private void assertReaderOpen(DirectoryReader reader) {
- reader.ensureOpen();
-
- if (reader instanceof DirectoryReader) {
- DirectoryReader[] subReaders = reader.getSequentialSubReaders();
- for (int i = 0; i < subReaders.length; i++) {
- assertReaderOpen(subReaders[i]);
- }
- }
- }
- */
-
- private void assertRefCountEquals(int refCount, DirectoryReader reader) {
- assertEquals("Reader has wrong refCount value.", refCount, reader.getRefCount());
- }
-
-
- private abstract static class TestReopen {
+ abstract static class TestReopen {
protected abstract DirectoryReader openReader() throws IOException;
protected abstract void modifyIndex(int i) throws IOException;
}
- private static class KeepAllCommits implements IndexDeletionPolicy {
+ static class KeepAllCommits implements IndexDeletionPolicy {
public void onInit(List<? extends IndexCommit> commits) {
}
public void onCommit(List<? extends IndexCommit> commits) {
Index: lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestDocTermOrds.java (working copy)
@@ -154,11 +154,11 @@
System.out.println("TEST: reader=" + r);
}
- for(IndexReader subR : r.getSequentialSubReaders()) {
+ for(AtomicReaderContext ctx : r.leaves()) {
if (VERBOSE) {
- System.out.println("\nTEST: sub=" + subR);
+ System.out.println("\nTEST: sub=" + ctx.reader());
}
- verify((AtomicReader) subR, idToOrds, termsArray, null);
+ verify(ctx.reader(), idToOrds, termsArray, null);
}
// Also test top-level reader: its enum does not support
@@ -273,11 +273,11 @@
idToOrdsPrefix[id] = newOrdsArray;
}
- for(IndexReader subR : r.getSequentialSubReaders()) {
+ for(AtomicReaderContext ctx : r.leaves()) {
if (VERBOSE) {
- System.out.println("\nTEST: sub=" + subR);
+ System.out.println("\nTEST: sub=" + ctx.reader());
}
- verify((AtomicReader) subR, idToOrdsPrefix, termsArray, prefixRef);
+ verify(ctx.reader(), idToOrdsPrefix, termsArray, prefixRef);
}
// Also test top-level reader: its enum does not support
Index: lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java (working copy)
@@ -91,7 +91,7 @@
writer.close(true);
DirectoryReader reader = DirectoryReader.open(dir, 1);
- assertEquals(1, reader.getSequentialSubReaders().size());
+ assertEquals(1, reader.leaves().size());
IndexSearcher searcher = new IndexSearcher(reader);
Index: lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java (working copy)
@@ -289,8 +289,9 @@
assertEquals(numDocs, r.numDocs());
- for(IndexReader sub : r.getSequentialSubReaders()) {
- final int[] ids = FieldCache.DEFAULT.getInts((AtomicReader) sub, "id", false);
+ for(AtomicReaderContext ctx : r.leaves()) {
+ final AtomicReader sub = ctx.reader();
+ final int[] ids = FieldCache.DEFAULT.getInts(sub, "id", false);
for(int docID=0;docID 1);
+ assertTrue("Reader incorrectly sees one segment", reader.leaves().size() > 1);
reader.close();
// Abort the writer:
@@ -297,7 +297,7 @@
reader = DirectoryReader.open(dir);
// Reader should still see index as multi-segment
- assertTrue("Reader incorrectly sees one segment", reader.getSequentialSubReaders().size() > 1);
+ assertTrue("Reader incorrectly sees one segment", reader.leaves().size() > 1);
reader.close();
if (VERBOSE) {
@@ -316,7 +316,7 @@
reader = DirectoryReader.open(dir);
// Reader should see index as one segment
- assertEquals("Reader incorrectly sees more than one segment", 1, reader.getSequentialSubReaders().size());
+ assertEquals("Reader incorrectly sees more than one segment", 1, reader.leaves().size());
reader.close();
dir.close();
}
Index: lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java (working copy)
@@ -187,7 +187,7 @@
if (0 == pass) {
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
- assertEquals(1, reader.getSequentialSubReaders().size());
+ assertEquals(1, reader.leaves().size());
reader.close();
} else {
// Get another segment to flush so we can verify it is
@@ -197,7 +197,7 @@
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
- assertTrue(reader.getSequentialSubReaders().size() > 1);
+ assertTrue(reader.leaves().size() > 1);
reader.close();
SegmentInfos infos = new SegmentInfos();
Index: lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java (working copy)
@@ -339,13 +339,13 @@
if (compositeComposite) {
rd1 = new MultiReader(DirectoryReader.open(dir1), DirectoryReader.open(dir1));
rd2 = new MultiReader(DirectoryReader.open(dir2), DirectoryReader.open(dir2));
- assertEquals(2, rd1.getSequentialSubReaders().size());
- assertEquals(2, rd2.getSequentialSubReaders().size());
+ assertEquals(2, rd1.getTopReaderContext().children().size());
+ assertEquals(2, rd2.getTopReaderContext().children().size());
} else {
rd1 = DirectoryReader.open(dir1);
rd2 = DirectoryReader.open(dir2);
- assertEquals(3, rd1.getSequentialSubReaders().size());
- assertEquals(3, rd2.getSequentialSubReaders().size());
+ assertEquals(3, rd1.getTopReaderContext().children().size());
+ assertEquals(3, rd2.getTopReaderContext().children().size());
}
ParallelCompositeReader pr = new ParallelCompositeReader(rd1, rd2);
return newSearcher(pr);
Index: lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestPayloads.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestPayloads.java (working copy)
@@ -604,7 +604,7 @@
field.setTokenStream(ts);
writer.addDocument(doc);
DirectoryReader reader = writer.getReader();
- AtomicReader sr = reader.getSequentialSubReaders().get(0);
+ AtomicReader sr = SlowCompositeReaderWrapper.wrap(reader);
DocsAndPositionsEnum de = sr.termPositionsEnum(null, "field", new BytesRef("withPayload"));
de.nextDoc();
de.nextPosition();
Index: lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestPostingsOffsets.java (working copy)
@@ -289,9 +289,9 @@
w.close();
final String[] terms = new String[] {"a", "b", "c", "d"};
- for(IndexReader reader : r.getSequentialSubReaders()) {
+ for(AtomicReaderContext ctx : r.leaves()) {
// TODO: improve this
- AtomicReader sub = (AtomicReader) reader;
+ AtomicReader sub = ctx.reader();
//System.out.println("\nsub=" + sub);
final TermsEnum termsEnum = sub.fields().terms("content").iterator(null);
DocsEnum docs = null;
Index: lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java (working copy)
@@ -281,10 +281,10 @@
}
private static void printDocs(DirectoryReader r) throws Throwable {
- List<? extends AtomicReader> subs = r.getSequentialSubReaders();
- for(IndexReader sub : subs) {
+ for(AtomicReaderContext ctx : r.leaves()) {
// TODO: improve this
- Bits liveDocs = ((AtomicReader)sub).getLiveDocs();
+ AtomicReader sub = ctx.reader();
+ Bits liveDocs = sub.getLiveDocs();
System.out.println(" " + ((SegmentReader) sub).getSegmentInfo());
for(int docID=0;docID leaves = topReaderContext.leaves();
assertEquals(1, leaves.size());
@@ -371,7 +371,7 @@
writer.forceMerge(1);
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
- assertEquals(1, reader.getSequentialSubReaders().size());
+ assertEquals(1, reader.leaves().size());
IndexReaderContext topReaderContext = reader.getTopReaderContext();
List<AtomicReaderContext> leaves = topReaderContext.leaves();
DocValues docValues = leaves.get(0).reader().docValues("promote");
Index: lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java (revision 1372657)
+++ lucene/core/src/test/org/apache/lucene/search/TestShardSearching.java (working copy)
@@ -22,8 +22,8 @@
import java.util.Collections;
import java.util.List;
-import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.Term;
@@ -310,13 +310,11 @@
final int numNodes = shardSearcher.nodeVersions.length;
int[] base = new int[numNodes];
- final List<? extends IndexReader> subs = ((CompositeReader) mockSearcher.getIndexReader()).getSequentialSubReaders();
+ final List<IndexReaderContext> subs = mockSearcher.getTopReaderContext().children();
assertEquals(numNodes, subs.size());
- int docCount = 0;
for(int nodeID=0;nodeID diagnostics = segReader.getSegmentInfo().info.getDiagnostics();
assertNotNull(diagnostics);
String source = diagnostics.get("source");
assertNotNull(source);
if (source.equals("merge")) {
assertTrue("sub reader " + sub + " wasn't warmed: warmed=" + warmed + " diagnostics=" + diagnostics + " si=" + segReader.getSegmentInfo(),
- !assertMergedSegmentsWarmed || warmed.containsKey(((SegmentReader) sub).core));
+ !assertMergedSegmentsWarmed || warmed.containsKey(segReader.core));
}
}
if (s.getIndexReader().numDocs() > 0) {
Index: lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
===================================================================
--- lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (revision 1372657)
+++ lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (working copy)
@@ -506,10 +506,10 @@
* do tests on that segment's reader. This is an utility method to help them.
*/
public static SegmentReader getOnlySegmentReader(DirectoryReader reader) {
- List<? extends IndexReader> subReaders = reader.getSequentialSubReaders();
+ List<AtomicReaderContext> subReaders = reader.leaves();
if (subReaders.size() != 1)
throw new IllegalArgumentException(reader + " has " + subReaders.size() + " segments instead of exactly one");
- final IndexReader r = subReaders.get(0);
+ final AtomicReader r = subReaders.get(0).reader();
assertTrue(r instanceof SegmentReader);
return (SegmentReader) r;
}
Index: solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
===================================================================
--- solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java (revision 1372657)
+++ solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java (working copy)
@@ -679,7 +679,7 @@
if (buildOnCommit) {
buildSpellIndex(newSearcher);
} else if (buildOnOptimize) {
- if (newSearcher.getIndexReader().getSequentialSubReaders().size() == 1) {
+ if (newSearcher.getIndexReader().leaves().size() == 1) {
buildSpellIndex(newSearcher);
} else {
LOG.info("Index is not optimized therefore skipping building spell check index for: " + checker.getDictionaryName());
Index: solr/core/src/test/org/apache/solr/search/TestSort.java
===================================================================
--- solr/core/src/test/org/apache/solr/search/TestSort.java (revision 1372657)
+++ solr/core/src/test/org/apache/solr/search/TestSort.java (working copy)
@@ -198,7 +198,7 @@
DirectoryReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
// System.out.println("segments="+searcher.getIndexReader().getSequentialSubReaders().length);
- assertTrue(reader.getSequentialSubReaders().size() > 1);
+ assertTrue(reader.leaves().size() > 1);
for (int i=0; i