Index: solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java
===================================================================
--- solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java	(revision 1439070)
+++ solr/core/src/java/org/apache/solr/analysis/TrieTokenizerFactory.java	(working copy)
@@ -20,12 +20,15 @@
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.util.TokenizerFactory;
+import org.apache.lucene.util.Attribute;
+import org.apache.lucene.util.AttributeImpl;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.schema.DateField;
 import static org.apache.solr.schema.TrieField.TrieTypes;
 
 import java.io.IOException;
 import java.io.Reader;
+import java.util.Iterator;
 
 /**
  * Tokenizer for trie fields. It uses NumericTokenStream to create multiple trie encoded string per number.
@@ -68,9 +71,18 @@
     return new NumericTokenStream(precisionStep);
   }
 
-  public TrieTokenizer(Reader input, TrieTypes type, NumericTokenStream ts) {
-    // must share the attribute source with the NumericTokenStream we delegate to
-    super(ts, input);
+  public TrieTokenizer(Reader input, TrieTypes type, final NumericTokenStream ts) {
+    // Häckidy-Hick-Hack: must share the attributes with the NumericTokenStream we delegate to, so we create a fake factory:
+    super(new AttributeFactory() {
+      @Override
+      public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
+        return (AttributeImpl) ts.addAttribute(attClass);
+      }
+    }, input);
+    // add all attributes:
+    for (Iterator<Class<? extends Attribute>> it = ts.getAttributeClassesIterator(); it.hasNext();) {
+      addAttribute(it.next());
+    }
     this.type = type;
     this.ts = ts;
   }