Index: lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java
===================================================================
--- lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java	(revision 1428419)
+++ lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java	(working copy)
@@ -20,15 +20,47 @@
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.store.Directory;
 
-public class TestEmptyTokenStream extends LuceneTestCase {
+public class TestEmptyTokenStream extends BaseTokenStreamTestCase {
 
-  public void test() throws IOException {
+  public void testConsume() throws IOException {
     TokenStream ts = new EmptyTokenStream();
     assertFalse(ts.incrementToken());
     ts.reset();
     assertFalse(ts.incrementToken());
   }
+
+  public void testConsume2() throws IOException {
+    BaseTokenStreamTestCase.assertTokenStreamContents(new EmptyTokenStream(), new String[0]);
+  }
+  public void testIndexWriter_LUCENE4656() throws IOException {
+    Directory directory = newDirectory();
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, null));
+
+    TokenStream ts = new EmptyTokenStream();
+    assertFalse(ts.hasAttribute(TermToBytesRefAttribute.class));
+
+    Document doc = new Document();
+    doc.add(new StringField("id", "0", Field.Store.YES));
+    doc.add(new TextField("description", ts));
+
+    // this should not fail because we have no TermToBytesRefAttribute
+    writer.addDocument(doc);
+
+    writer.close();
+    directory.close();
+  }
+
 
 }
Index: lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java
===================================================================
--- lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java	(revision 1428419)
+++ lucene/core/src/java/org/apache/lucene/index/DocInverterPerField.java	(working copy)
@@ -106,75 +106,75 @@
           OffsetAttribute offsetAttribute = fieldState.attributeSource.addAttribute(OffsetAttribute.class);
           PositionIncrementAttribute posIncrAttribute = fieldState.attributeSource.addAttribute(PositionIncrementAttribute.class);
 
-          consumer.start(field);
+          if (hasMoreTokens) {
+            consumer.start(field);
 
-          for (;;) {
+            while (hasMoreTokens) {
 
-            // If we hit an exception in stream.next below
-            // (which is fairly common, eg if analyzer
-            // chokes on a given document), then it's
-            // non-aborting and (above) this one document
-            // will be marked as deleted, but still
-            // consume a docID
+              // If we hit an exception in stream.next below
+              // (which is fairly common, eg if analyzer
+              // chokes on a given document), then it's
+              // non-aborting and (above) this one document
+              // will be marked as deleted, but still
+              // consume a docID
 
-            if (!hasMoreTokens) break;
+              final int posIncr = posIncrAttribute.getPositionIncrement();
+              if (posIncr < 0) {
+                throw new IllegalArgumentException("position increment must be >=0 (got " + posIncr + ")");
+              }
+              if (fieldState.position == 0 && posIncr == 0) {
+                throw new IllegalArgumentException("first position increment must be > 0 (got 0)");
+              }
+              int position = fieldState.position + posIncr;
+              if (position > 0) {
+                // NOTE: confusing: this "mirrors" the
+                // position++ we do below
+                position--;
+              } else if (position < 0) {
+                throw new IllegalArgumentException("position overflow for field '" + field.name() + "'");
+              }
+
+              // position is legal, we can safely place it in fieldState now.
+              // not sure if anything will use fieldState after non-aborting exc...
+              fieldState.position = position;
 
-            final int posIncr = posIncrAttribute.getPositionIncrement();
-            if (posIncr < 0) {
-              throw new IllegalArgumentException("position increment must be >=0 (got " + posIncr + ")");
-            }
-            if (fieldState.position == 0 && posIncr == 0) {
-              throw new IllegalArgumentException("first position increment must be > 0 (got 0)");
-            }
-            int position = fieldState.position + posIncr;
-            if (position > 0) {
-              // NOTE: confusing: this "mirrors" the
-              // position++ we do below
-              position--;
-            } else if (position < 0) {
-              throw new IllegalArgumentException("position overflow for field '" + field.name() + "'");
-            }
-
-            // position is legal, we can safely place it in fieldState now.
-            // not sure if anything will use fieldState after non-aborting exc...
-            fieldState.position = position;
+              if (posIncr == 0)
+                fieldState.numOverlap++;
+
+              if (checkOffsets) {
+                int startOffset = fieldState.offset + offsetAttribute.startOffset();
+                int endOffset = fieldState.offset + offsetAttribute.endOffset();
+                if (startOffset < 0 || endOffset < startOffset) {
+                  throw new IllegalArgumentException("startOffset must be non-negative, and endOffset must be >= startOffset, "
+                      + "startOffset=" + startOffset + ",endOffset=" + endOffset);
+                }
+                if (startOffset < lastStartOffset) {
+                  throw new IllegalArgumentException("offsets must not go backwards startOffset="
+                      + startOffset + " is < lastStartOffset=" + lastStartOffset);
+                }
+                lastStartOffset = startOffset;
+              }
 
-            if (posIncr == 0)
-              fieldState.numOverlap++;
-
-            if (checkOffsets) {
-              int startOffset = fieldState.offset + offsetAttribute.startOffset();
-              int endOffset = fieldState.offset + offsetAttribute.endOffset();
-              if (startOffset < 0 || endOffset < startOffset) {
-                throw new IllegalArgumentException("startOffset must be non-negative, and endOffset must be >= startOffset, "
-                    + "startOffset=" + startOffset + ",endOffset=" + endOffset);
+              boolean success = false;
+              try {
+                // If we hit an exception in here, we abort
+                // all buffered documents since the last
+                // flush, on the likelihood that the
+                // internal state of the consumer is now
+                // corrupt and should not be flushed to a
+                // new segment:
+                consumer.add();
+                success = true;
+              } finally {
+                if (!success) {
+                  docState.docWriter.setAborting();
+                }
               }
-              if (startOffset < lastStartOffset) {
-                throw new IllegalArgumentException("offsets must not go backwards startOffset="
-                    + startOffset + " is < lastStartOffset=" + lastStartOffset);
-              }
-              lastStartOffset = startOffset;
-            }
+              fieldState.length++;
+              fieldState.position++;
 
-            boolean success = false;
-            try {
-              // If we hit an exception in here, we abort
-              // all buffered documents since the last
-              // flush, on the likelihood that the
-              // internal state of the consumer is now
-              // corrupt and should not be flushed to a
-              // new segment:
-              consumer.add();
-              success = true;
-            } finally {
-              if (!success) {
-                docState.docWriter.setAborting();
-              }
+              hasMoreTokens = stream.incrementToken();
             }
-            fieldState.length++;
-            fieldState.position++;
-
-            hasMoreTokens = stream.incrementToken();
           }
           // trigger streams to perform end-of-stream operations
           stream.end();