Index: lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java
===================================================================
--- lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java	(revision 1127458)
+++ lucene/src/java/org/apache/lucene/index/FreqProxTermsWriter.java	(working copy)
@@ -94,48 +94,49 @@
                  -> FormatPostingsPositionsConsumer
                    -> IMPL: FormatPostingsPositionsWriter
     */
-
-    int start = 0;
-    while(start < numAllFields) {
-      final FieldInfo fieldInfo = allFields.get(start).fieldInfo;
-      final String fieldName = fieldInfo.name;
-
-      int end = start+1;
-      while(end < numAllFields && allFields.get(end).fieldInfo.name.equals(fieldName))
-        end++;
-
-      FreqProxTermsWriterPerField[] fields = new FreqProxTermsWriterPerField[end-start];
-      for(int i=start;i> entry : threadsAndFields.entrySet()) {
+        FreqProxTermsWriterPerThread perThread = (FreqProxTermsWriterPerThread) entry.getKey();
+        perThread.termsHashPerThread.reset(true);
       }
-
-      start = end;
+    } finally {
+      consumer.finish();
     }
-
-    for (Map.Entry> entry : threadsAndFields.entrySet()) {
-      FreqProxTermsWriterPerThread perThread = (FreqProxTermsWriterPerThread) entry.getKey();
-      perThread.termsHashPerThread.reset(true);
-
-    consumer.finish();
   }
 
   private byte[] payloadBuffer;
@@ -176,141 +177,147 @@
       segDeletes = null;
     }
 
-    // TODO: really TermsHashPerField should take over most
-    // of this loop, including merge sort of terms from
-    // multiple threads and interacting with the
-    // TermsConsumer, only calling out to us (passing us the
-    // DocsConsumer) to handle delivery of docs/positions
-    while(numFields > 0) {
+    try {
+      // TODO: really TermsHashPerField should take over most
+      // of this loop, including merge sort of terms from
+      // multiple threads and interacting with the
+      // TermsConsumer, only calling out to us (passing us the
+      // DocsConsumer) to handle delivery of docs/positions
+      while(numFields > 0) {
 
-      // Get the next term to merge
-      termStates[0] = mergeStates[0];
-      int numToMerge = 1;
+        // Get the next term to merge
+        termStates[0] = mergeStates[0];
+        int numToMerge = 1;
 
-      // TODO: pqueue
-      for(int i=1;i 0) {
-
-        FreqProxFieldMergeState minState = termStates[0];
-        for(int i=1;i 0) {
+
+          FreqProxFieldMergeState minState = termStates[0];
+          for(int i=1;i> 1;
+          // Carefully copy over the prox + payload info,
+          // changing the format to match Lucene's segment
+          // format.
+          if (!currentFieldOmitTermFreqAndPositions) {
+            // omitTermFreqAndPositions == false so we do write positions &
+            // payload
+            try {
+              int position = 0;
+              for(int j=0;j> 1;
+
+                final int payloadLength;
+                if ((code & 1) != 0) {
+                  // This position has a payload
+                  payloadLength = prox.readVInt();
+
+                  if (payloadBuffer == null || payloadBuffer.length < payloadLength)
+                    payloadBuffer = new byte[payloadLength];
+
+                  prox.readBytes(payloadBuffer, 0, payloadLength);
+
+                } else
+                  payloadLength = 0;
+
+                posConsumer.addPosition(position, payloadBuffer, 0, payloadLength);
+              } //End for
+            } finally {
+              posConsumer.finish();
+            }
+          }
-            final int payloadLength;
-            if ((code & 1) != 0) {
-              // This position has a payload
-              payloadLength = prox.readVInt();
+          if (!minState.nextDoc()) {
-              if (payloadBuffer == null || payloadBuffer.length < payloadLength)
-                payloadBuffer = new byte[payloadLength];
+            // Remove from termStates
+            int upto = 0;
+            for(int i=0;i
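
Note on the change (commentary, not part of the patch): both hunks apply the same shape of fix. The write loop is moved inside a try block, and the closing call on the consumer (consumer.finish() in flush, posConsumer.finish() in the positions loop) is moved into a finally block, so the consumer chain is still closed when an exception aborts the loop partway through. The snippet below is only a minimal sketch of that try/finally shape under assumed names (PostingsConsumer, writeField, FlushSketch); it is not the Lucene API shown above.

import java.io.IOException;
import java.util.List;

// Hypothetical consumer, standing in for the FormatPostings* chain.
interface PostingsConsumer {
  void writeField(String field) throws IOException; // may throw mid-flush (e.g. disk full)
  void finish() throws IOException;                 // must run even if a write throws
}

class FlushSketch {
  static void flush(List<String> fields, PostingsConsumer consumer) throws IOException {
    try {
      for (String field : fields) {
        consumer.writeField(field);
      }
    } finally {
      // Mirrors the patch: finish() sits in finally so the consumer is
      // always closed, on both the normal and the aborting path.
      consumer.finish();
    }
  }
}

One trade-off of this shape, as a matter of standard Java semantics: if finish() itself throws inside the finally block, that exception replaces the original failure from the loop.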