Index: lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java	(revision 1428681)
+++ lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java	(working copy)
@@ -82,7 +82,7 @@
     final SegmentInfo si = new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, -1, false, codec, null, null);
 
     SegmentMerger merger = new SegmentMerger(si, InfoStream.getDefault(), mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL,
-                                             MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), newIOContext(random()));
+                                             MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), newIOContext(random()), null);
     merger.add(reader1);
     merger.add(reader2);
     MergeState mergeState = merger.merge();
Index: lucene/core/src/test/org/apache/lucene/index/TestDoc.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestDoc.java	(revision 1428681)
+++ lucene/core/src/test/org/apache/lucene/index/TestDoc.java	(working copy)
@@ -219,7 +219,7 @@
     final SegmentInfo si = new SegmentInfo(si1.info.dir, Constants.LUCENE_MAIN_VERSION, merged, -1, false, codec, null, null);
 
     SegmentMerger merger = new SegmentMerger(si, InfoStream.getDefault(), trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL,
-                                             MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), context);
+                                             MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), context, null);
     merger.add(r1);
     merger.add(r2);
Index: lucene/core/src/test/org/apache/lucene/index/TestExternalSegmentData.java
===================================================================
--- lucene/core/src/test/org/apache/lucene/index/TestExternalSegmentData.java	(revision 0)
+++ lucene/core/src/test/org/apache/lucene/index/TestExternalSegmentData.java	(working copy)
@@ -0,0 +1,177 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
+
+public class TestExternalSegmentData extends LuceneTestCase {
+
+  private static final class StoredFieldCountPerDocument extends ExternalSegmentData {
+
+    @Override
+    public SegmentWriter newSegment(String segmentName) {
+      return new SegmentWriter() {
+          private final List<Integer> fieldCounts = new ArrayList<Integer>();
+
+          @Override
+          public void addDocument(int docID, IndexDocument doc) {
+            // Skip any missing (deleted due to exc during
+            // indexing) docs:
+            while(fieldCounts.size() < docID) {
+              fieldCounts.add(0);
+            }
+            int count = 0;
+            for(StorableField f : doc.storableFields()) {
+              if (f.fieldType().stored()) {
+                // RIW randomly adds not-stored DocValues
+                // fields, so we have to not count those
+                count++;
+              }
+            }
+            if (VERBOSE) {
+              System.out.println("FC: add docID=" + docID + " count=" + count);
+            }
+            fieldCounts.add(count);
+          }
+
+          @Override
+          public void flush(SegmentWriteState state) throws IOException {
+            if (VERBOSE) {
+              System.out.println("FC: flush seg=" + state.segmentInfo.name + " dir=" + state.directory);
+            }
+            while(fieldCounts.size() < state.segmentInfo.getDocCount()) {
+              fieldCounts.add(0);
+            }
+            String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, "", "fc");
+            IndexOutput o = state.directory.createOutput(fileName, IOContext.DEFAULT);
+            for(int size : fieldCounts) {
+              if (VERBOSE) {
+                System.out.println("  write " + size);
+              }
+              o.writeVInt(size);
+            }
+            o.close();
+          }
+        };
+    }
+
+    @Override
+    public void merge(SegmentWriteState state, MergeState mergeState) throws IOException {
+      String mergedFileName = IndexFileNames.segmentFileName(mergeState.segmentInfo.name, "", "fc");
+      if (VERBOSE) {
+        System.out.println("FC: merge seg=" + mergeState.segmentInfo.name);
+      }
+      IndexOutput o = state.directory.createOutput(mergedFileName, IOContext.DEFAULT);
+
+      try {
+        int readerIDX = 0;
+        for(AtomicReader r : mergeState.readers) {
+          // NOTE: will not work if you eg addIndices(SlowCompositeReader...)
+          String fileName = IndexFileNames.segmentFileName(((SegmentReader) r).getSegmentName(), "", "fc");
+          IndexInput i = state.directory.openInput(fileName, IOContext.DEFAULT);
+          try {
+            Bits liveDocs = r.getLiveDocs();
+            for(int docID=0;docID<r.maxDoc();docID++) {
Index: lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java	(revision 1428681)
+++ lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java	(working copy)
@@ -66,6 +69,7 @@
     this.codec = segmentInfo.getCodec();
     this.context = context;
     this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
+    this.esd = esd;
   }
 
   /**
@@ -120,6 +124,10 @@
     FieldInfosWriter fieldInfosWriter = codec.fieldInfosFormat().getFieldInfosWriter();
     fieldInfosWriter.write(directory, mergeState.segmentInfo.name, mergeState.fieldInfos, context);
 
+    if (esd != null) {
+      esd.merge(segmentWriteState, mergeState);
+    }
+
     return mergeState;
   }