Index: lucene/CHANGES.txt
===================================================================
--- lucene/CHANGES.txt	(revision 1162001)
+++ lucene/CHANGES.txt	(working copy)
@@ -50,7 +50,10 @@
   throw NoSuchDirectoryException when all files written so far
   have been written to one directory, but the other still has not
   yet been created on the filesystem.  (Robert Muir)
-  
+
+* LUCENE-3402: term vectors disappeared from the index if optimize() was called
+  following addIndexes(). (Shai Erera)
+
 New Features
 
 * LUCENE-3290: Added FieldInvertState.numUniqueTerms
Index: lucene/src/test/org/apache/lucene/search/TestTermVectors.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestTermVectors.java	(revision 1162001)
+++ lucene/src/test/org/apache/lucene/search/TestTermVectors.java	(working copy)
@@ -17,11 +17,15 @@
  * limitations under the License.
  */
 
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Field.Index;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.*;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
@@ -443,4 +447,84 @@
     }
     reader.close();
   }
+
+  private IndexWriter createWriter(Directory dir) throws IOException {
+    return new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random)));
+  }
+
+  private void createDir(Directory dir) throws IOException {
+    IndexWriter writer = createWriter(dir);
+    writer.addDocument(createDoc());
+    writer.close();
+  }
+
+  private Document createDoc() {
+    Document doc = new Document();
+    doc.add(new Field("c", "aaa", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
+    return doc;
+  }
+
+  private void verifyIndex(Directory dir) throws IOException {
+    IndexReader r = IndexReader.open(dir);
+    int numDocs = r.numDocs();
+    for (int i = 0; i < numDocs; i++) {
+      TermFreqVector tfv = r.getTermFreqVector(i, "c");
+      assertNotNull("term vectors should not have been null for document " + i, tfv);
+    }
+    r.close();
+  }
+
+  public void testOptimizeAddDocs() throws Exception {
+    Directory target = newDirectory();
+    IndexWriter writer = createWriter(target);
+    for (int i = 0; i < 2; i++) {
+      writer.addDocument(createDoc());
+      writer.commit();
+    }
+    writer.optimize();
+    writer.close();
+
+    verifyIndex(target);
+    target.close();
+  }
+
+  public void testOptimizeAddIndexesDir() throws Exception {
+    Directory[] input = new Directory[] { newDirectory(), newDirectory() };
+    Directory target = newDirectory();
+
+    for (Directory dir : input) {
+      createDir(dir);
+    }
+
+    IndexWriter writer = createWriter(target);
+    writer.addIndexes(input);
+    writer.optimize();
+    writer.close();
+
+    verifyIndex(target);
+
+    IOUtils.closeSafely(false, target, input[0], input[1]);
+  }
+
+  public void testOptimizeAddIndexesReader() throws Exception {
+    Directory[] input = new Directory[] { newDirectory(), newDirectory() };
+    Directory target = newDirectory();
+
+    for (Directory dir : input) {
+      createDir(dir);
+    }
+
+    IndexWriter writer = createWriter(target);
+    for (Directory dir : input) {
+      IndexReader r = IndexReader.open(dir);
+      writer.addIndexes(r);
+      r.close();
+    }
+    writer.optimize();
+    writer.close();
+
+    verifyIndex(target);
+    IOUtils.closeSafely(false, target, input[0], input[1]);
+  }
 }
Index: lucene/src/java/org/apache/lucene/index/IndexWriter.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/IndexWriter.java	(revision 1162001)
+++ lucene/src/java/org/apache/lucene/index/IndexWriter.java	(working copy)
@@ -4209,8 +4209,6 @@
     merge.readers = new ArrayList<SegmentReader>();
     merge.readerClones = new ArrayList<SegmentReader>();
 
-    merge.info.setHasVectors(merger.fieldInfos().hasVectors());
-
     // This is try/finally to make sure merger's readers are
     // closed:
     boolean success = false;
@@ -4250,6 +4248,9 @@
       // This is where all the work happens:
       mergedDocCount = merge.info.docCount = merger.merge();
 
+      // LUCENE-3402: set hasVectors after merge(), so that it is properly set.
+      merge.info.setHasVectors(merger.fieldInfos().hasVectors());
+
       assert mergedDocCount == totDocCount;
 
       if (infoStream != null) {
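
Not part of the patch: for reviewers who want to reproduce the failure outside the test framework, the following is a minimal standalone sketch of the scenario the new tests cover. The class name, RAMDirectory, StandardAnalyzer, and Version.LUCENE_34 are illustrative choices not taken from the patch. Per the CHANGES entry above, on an unpatched build the vectors checked at the end should come back null once addIndexes() is followed by optimize(); with the fix applied they remain present.

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

// Hypothetical reproduction class, not included in the patch.
public class TermVectorsAfterAddIndexes {

  // Index a single document with term vectors (positions + offsets) into dir.
  static void indexOneDoc(Directory dir) throws IOException {
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
        Version.LUCENE_34, new StandardAnalyzer(Version.LUCENE_34)));
    Document doc = new Document();
    doc.add(new Field("c", "aaa", Field.Store.YES, Field.Index.ANALYZED,
        Field.TermVector.WITH_POSITIONS_OFFSETS));
    w.addDocument(doc);
    w.close();
  }

  public static void main(String[] args) throws IOException {
    // Two source indexes, each with one document that carries term vectors.
    Directory[] input = new Directory[] { new RAMDirectory(), new RAMDirectory() };
    for (Directory dir : input) {
      indexOneDoc(dir);
    }

    // Merge both sources into a target index, then optimize. Before the fix,
    // IndexWriter recorded hasVectors from the SegmentMerger's FieldInfos
    // before merge() had run, so the merged segment could be marked as
    // having no term vectors.
    Directory target = new RAMDirectory();
    IndexWriter writer = new IndexWriter(target, new IndexWriterConfig(
        Version.LUCENE_34, new StandardAnalyzer(Version.LUCENE_34)));
    writer.addIndexes(input);
    writer.optimize();
    writer.close();

    // Check that every document still has its term vectors for field "c".
    IndexReader r = IndexReader.open(target);
    for (int i = 0; i < r.numDocs(); i++) {
      TermFreqVector tfv = r.getTermFreqVector(i, "c");
      System.out.println("doc " + i + " term vector: "
          + (tfv != null ? "present" : "MISSING"));
    }
    r.close();
  }
}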