Index: modules/join/src/java/org/apache/lucene/search/join/TermParentChildResolveCollector.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- modules/join/src/java/org/apache/lucene/search/join/TermParentChildResolveCollector.java	(revision )
+++ modules/join/src/java/org/apache/lucene/search/join/TermParentChildResolveCollector.java	(revision )
@@ -0,0 +1,146 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.search.*;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.SentinelIntSet;
+
+import java.io.IOException;
+
/**
 * Resolves documents matching the query belonging to a parent child document. Both parent and child documents can be
 * resolved.
 * <p/>
 * A parent document is identified by the specified parentValue and a child document by the childValue. Furthermore,
 * parent and child documents both have a field with identifiers. Matching child or parent documents with the same
 * value are resolved in a parent child relation.
 * <p/>
 * This collector depends on the {@link ParentChildResult} created by a {@link TermTopParentChildCollector} instance.
 */
// TODO: Extract common code and make that abstract so that we can make alternative impls (for example docvalues)
public class TermParentChildResolveCollector extends Collector {

  // First-pass result whose collected link values this second pass resolves:
  // for each link value it fills in the "other side" (the parent ScoreDoc
  // and/or the top-N child docs).
  private final ParentChildResult parentChildResult;
  // Scratch BytesRef reused for term lookups to avoid per-call allocation.
  private final BytesRef spare = new BytesRef();

  // Doc base of the current segment (set in setNextReader); used to convert
  // segment-relative doc ids to top-level doc ids for the parent ScoreDoc.
  private int docBase;
  // Per-segment sets of the term ordinals (in the link fields) corresponding
  // to the link values gathered in the first pass. The slot returned by put()
  // indexes into parentDocs / childDocs below.
  private SentinelIntSet parentOrdSet;
  private SentinelIntSet childOrdSet;
  // Per-segment ordinal of parentValue / childValue in the type field;
  // negative when the value does not occur in this segment.
  private int parentOrd;
  private int childOrd;
  // FieldCache term indexes for the current segment, refreshed per reader.
  private FieldCache.DocTermsIndex parentFieldIndex;
  private FieldCache.DocTermsIndex childFieldIndex;
  private FieldCache.DocTermsIndex typeFieldIndex;
  // Slot-addressed views: parentDocs[slot]/childDocs[slot] hold the
  // ParentChildDoc whose link-value ordinal was put into that slot of the
  // matching SentinelIntSet.
  private ParentChildDoc[] parentDocs;
  private ParentChildDoc[] childDocs;

  /**
   * @param parentChildResult First-pass result whose parent/child sides should be resolved
   * @param command           Settings controlling how children are collected (sort + size)
   * @throws IOException If creating a child collector fails
   */
  public TermParentChildResolveCollector(ParentChildResult parentChildResult, ParentChildCommand command) throws IOException {
    this.parentChildResult = parentChildResult;
    // Sized to the number of link values so a slot obtained from put() stays
    // stable while the arrays are filled. NOTE(review): assumes SentinelIntSet
    // does not rehash for exactly size() puts — confirm against its contract.
    parentOrdSet = new SentinelIntSet(parentChildResult.linkValues.size(), -1);
    childOrdSet = new SentinelIntSet(parentChildResult.linkValues.size(), -1);
    Sort joinSort = command.childrenSort();
    for (ParentChildDoc parentChildDoc : parentChildResult.docs) {
      if (joinSort == null || (joinSort.getSort().length == 1 && joinSort.getSort()[0].getType() == SortField.Type.SCORE)) {
        // Sort by score
        parentChildDoc.childDocs = TopScoreDocCollector.create(command.childrenNDocs(), false);
      } else {
        // Sort by fields
        parentChildDoc.childDocs = TopFieldCollector.create(joinSort, command.childrenNDocs(), false, false, false, false);
      }
    }
  }

  @Override
  public void setScorer(Scorer scorer) throws IOException {
    // Propagate the scorer to every per-link child collector.
    for (ParentChildDoc parentChildDoc : parentChildResult.docs) {
      parentChildDoc.childDocs.setScorer(scorer);
    }
  }

  @Override
  public void collect(int doc) throws IOException {
    // Dispatch on the document's type field: parent docs get their ScoreDoc
    // recorded, child docs are forwarded to the per-link child collector.
//    System.out.println("Type=" + typeFieldIndex.getTerm(doc, spare).utf8ToString());
    if (typeFieldIndex.getOrd(doc) == parentOrd) {
      handleParentDoc(doc);
    } else if (typeFieldIndex.getOrd(doc) == childOrd) {
      handleChildDoc(doc);
    }
  }

  // Records the top-level ScoreDoc of a parent document the first time it is
  // seen for a collected link value. Note: the local parentOrd intentionally
  // shadows the type-field ordinal field of the same name.
  private void handleParentDoc(int parentDoc) throws IOException {
    int parentOrd = parentFieldIndex.getOrd(parentDoc);
    if (parentOrdSet.exists(parentOrd)) {
      ParentChildDoc parentChildDoc = parentDocs[parentOrdSet.find(parentOrd)];
      if (parentChildDoc != null && parentChildDoc.parentDoc == null) {
        // The parent inherits the score of the originally matched child hit.
        parentChildDoc.parentDoc = new ScoreDoc(parentDoc + docBase, parentChildDoc.childOnly.score);
      }
    }
  }

  // Forwards a child document to the child collector of its link value, if
  // that link value was collected in the first pass.
  private void handleChildDoc(int childDoc) throws IOException {
    int childOrd = childFieldIndex.getOrd(childDoc);
    if (childOrdSet.exists(childOrd)) {
      ParentChildDoc parentChildDoc = childDocs[childOrdSet.find(childOrd)];
      if (parentChildDoc != null) {
        parentChildDoc.childDocs.collect(childDoc);
      }
    }
  }

  @Override
  public void setNextReader(AtomicReaderContext context) throws IOException {
    docBase = context.docBase;
    for (ParentChildDoc parentChildDoc : parentChildResult.docs) {
      parentChildDoc.childDocs.setNextReader(context);
    }

    // Re-resolve all term ordinals for the new segment: ordinals are
    // segment-relative and change between readers.
//    System.out.println("From field:" + prepareLinkResult.parentField);
    parentFieldIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), parentChildResult.parentField);
//    System.out.println("To field:" + prepareLinkResult.childField);
    childFieldIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), parentChildResult.childField);
    typeFieldIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), parentChildResult.typeField);
    parentOrd = typeFieldIndex.binarySearchLookup(parentChildResult.parentValue, spare);
    childOrd = typeFieldIndex.binarySearchLookup(parentChildResult.childValue, spare);

    childOrdSet.clear();
    parentOrdSet.clear();
    childDocs = new ParentChildDoc[childOrdSet.keys.length];
    parentDocs = new ParentChildDoc[parentOrdSet.keys.length];
    for (BytesRef key : parentChildResult.linkValues) {
      int ord = childFieldIndex.binarySearchLookup(key, spare);
      if (ord >= 0) {
        // Link value occurs in this segment's child field: remember its slot.
//        System.out.println("Value " + key.utf8ToString() + " has TO ord " + ord);
        childDocs[childOrdSet.put(ord)] = parentChildResult.linkValuesToParentChildDoc.get(key);
      }

      ord = parentFieldIndex.binarySearchLookup(key, spare);
      if (ord >= 0) {
        // Same for the parent field.
//        System.out.println("Value " + key.utf8ToString() + " has FROM ord " + ord);
        parentDocs[parentOrdSet.put(ord)] = parentChildResult.linkValuesToParentChildDoc.get(key);
      }
    }
  }

  @Override
  public boolean acceptsDocsOutOfOrder() {
    // Resolution only depends on per-doc field values, not on doc order.
    return true;
  }

}
\ No newline at end of file
Index: modules/join/src/java/org/apache/lucene/search/join/ParentChildDoc.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- modules/join/src/java/org/apache/lucene/search/join/ParentChildDoc.java	(revision )
+++ modules/join/src/java/org/apache/lucene/search/join/ParentChildDoc.java	(revision )
@@ -0,0 +1,87 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
+import org.apache.lucene.util.BytesRef;
+
+/**
+ * Represents parent child relation hit.
+ */
+public class ParentChildDoc {
+
+  ScoreDoc parentDoc;
+  BytesRef linkValue;
+
+  // @lucene.internal
+  ScoreDoc childOnly;
+  TopDocsCollector<?> childDocs;
+
+  public ScoreDoc getParentDoc() {
+    return parentDoc;
+  }
+
+  /**
+   * @return The original hit matching the user query. If parent child relations are grouped this is the most relevant
+   * matching document in the parent child relation.
+   */
+  public ScoreDoc getRealHit() {
+    return childOnly != null ? childOnly : parentDoc;
+  }
+
+  /**
+   * @return The top N children belonging to this parent child relation.
+   */
+  public TopDocs getChildDocs() {
+    return childDocs.topDocs();
+  }
+
+  public BytesRef getLinkValue() {
+    return linkValue;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    ParentChildDoc parentChildDoc = (ParentChildDoc) o;
+
+    if (getRealHit() != null ? getRealHit().doc != parentChildDoc.getRealHit().doc : parentChildDoc.getRealHit() != null) return false;
+    if (linkValue != null ? !linkValue.equals(parentChildDoc.linkValue) : parentChildDoc.linkValue != null) return false;
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    int result = getRealHit() != null ? getRealHit().hashCode() : 0;
+    result = 31 * result + (linkValue != null ? linkValue.hashCode() : 0);
+    return result;
+  }
+
+  @Override
+  public String toString() {
+    return "ParentChildDoc{" +
+            "getRealHit=" + getRealHit() +
+            ", linkValue=" + linkValue.utf8ToString() +
+            '}';
+  }
+}
Index: modules/join/src/test/org/apache/lucene/search/join/TestParentChildSearching.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- modules/join/src/test/org/apache/lucene/search/join/TestParentChildSearching.java	(revision )
+++ modules/join/src/test/org/apache/lucene/search/join/TestParentChildSearching.java	(revision )
@@ -0,0 +1,545 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.*;
+
+public class TestParentChildSearching extends LuceneTestCase {
+
+  @Test
+  public void testRandom() throws Exception {
+    int maxIndexIter = _TestUtil.nextInt(random, 6, 12);
+    for (int indexIter = 1; indexIter <= maxIndexIter; indexIter++) {
+      if (VERBOSE) {
+        System.out.println("indexIter=" + indexIter);
+      }
+      Directory dir = newDirectory();
+      RandomIndexWriter w = new RandomIndexWriter(
+          random,
+          dir,
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy())
+      );
+      int numberOfDocumentsToIndex = _TestUtil.nextInt(random, 118, 1076);
+      IndexIterationContext context = createContext(numberOfDocumentsToIndex, w);
+
+      IndexReader topLevelReader = w.getReader();
+      w.close();
+      try {
+        int maxSearchIter = _TestUtil.nextInt(random, 13, 26);
+        for (int searchIter = 1; searchIter <= maxSearchIter; searchIter++) {
+          if (VERBOSE) {
+            System.out.println("searchIter=" + searchIter);
+          }
+
+          IndexSearcher indexSearcher = newSearcher(topLevelReader);
+          boolean parent = context.randomParent[random.nextInt(context.randomUniqueLinkValues.length)];
+          BytesRef randomQueryValue = context.randomQueryValues[random.nextInt(context.randomQueryValues.length)];
+          Query query = new TermQuery(new Term("value", randomQueryValue));
+          if (VERBOSE) {
+          }
+          System.out.println("query=" + query);
+          ParentChildCommand command = new ParentChildCommand();
+          command.setGroupParent(true);  // TODO: randomize!
+          command.setGroupChild(true);  // TODO: randomize!
+          command.setParentField(parent ? "parent" : "child");
+          command.setChildField(parent ? "child" : "parent");
+          command.setTypeField("type");
+          command.setParentValue(parent ? new BytesRef("parent") : new BytesRef("child"));
+          command.setChildValue(parent ? new BytesRef("child") : new BytesRef("parent"));
+          command.setSort(selectRandomSort());
+          command.setChildrenSort(selectRandomSort());
+          command.setChildrenNDocs(1 + random.nextInt(numberOfDocumentsToIndex / 4));
+          command.setNDocs(1 + random.nextInt(numberOfDocumentsToIndex));
+          command.setChildrenQuery(random.nextBoolean() ? new MatchAllDocsQuery() : query);
+          if (VERBOSE) {
+            System.out.println(command);
+          }
+
+          ParentChildResult expectedResult = createExpectedResult(randomQueryValue, parent, indexSearcher, command, context);
+
+          TermTopParentChildCollector topLinkCollector = new TermTopParentChildCollector(command);
+          indexSearcher.search(query, topLinkCollector);
+          ParentChildResult result = topLinkCollector.getParentChildResult();
+          TermParentChildResolveCollector parentChildResolveCollector = new TermParentChildResolveCollector(result, command);
+          indexSearcher.search(command.childrenQuery(), parentChildResolveCollector);
+
+          assertEquals(expectedResult.hitCount, result.hitCount);
+          assertEquals(expectedResult.modCount, result.modCount);
+          assertEquals(expectedResult.linkValues, result.linkValues);
+          assertEquals(expectedResult.linkValuesToParentChildDoc, result.linkValuesToParentChildDoc);
+          assertEquals(expectedResult.getLinkedDocs().length, result.getLinkedDocs().length);
+
+          for (int i = 0; i < expectedResult.docs.length; i++) {
+            if (result.docs[i].childOnly != null) {
+              assertEquals(expectedResult.docs[i].childOnly.doc, result.docs[i].childOnly.doc);
+              assertEquals(expectedResult.docs[i].childOnly.score, result.docs[i].childOnly.score, 0.0);
+            } else {
+              assertEquals(expectedResult.docs[i].parentDoc.doc, result.docs[i].parentDoc.doc);
+              assertEquals(expectedResult.docs[i].parentDoc.score, result.docs[i].parentDoc.score, 0.0);
+              TopDocs expectedTopDocs = expectedResult.docs[i].childDocs.topDocs();
+              TopDocs actualTopDocs = result.docs[i].childDocs.topDocs();
+              assertEquals(expectedTopDocs.totalHits, actualTopDocs.totalHits);
+              assertEquals(expectedTopDocs.scoreDocs.length, expectedTopDocs.scoreDocs.length);
+              for (int j = 0; j < actualTopDocs.scoreDocs.length; j++) {
+                assertEquals(expectedTopDocs.scoreDocs[j].doc, actualTopDocs.scoreDocs[j].doc);
+              }
+            }
+          }
+        }
+      } finally {
+        FieldCache.DEFAULT.purge(SlowCompositeReaderWrapper.wrap(topLevelReader));
+        topLevelReader.close();
+        dir.close();
+      }
+    }
+  }
+
+  private ParentChildResult createExpectedResult(BytesRef query, boolean parentDocs, IndexSearcher indexSearcher, ParentChildCommand parentChildCommand, IndexIterationContext context) throws IOException {
+    ParentChildResult expectedResult = new ParentChildResult(parentChildCommand.getParentField(), parentChildCommand.getChildField(),
+        parentChildCommand.getTypeField(), parentChildCommand.nDocs(), parentChildCommand.getParentValue(), parentChildCommand.getChildValue());
+
+    Map<BytesRef, ParentChildHit> parentLinkHits = new HashMap<BytesRef, ParentChildHit>();
+    Map<BytesRef, ParentChildHit> childLinkHits = new HashMap<BytesRef, ParentChildHit>();
+    Set<BytesRef> seenValues = new HashSet<BytesRef>();
+
+    int i = 0;
+    final FieldComparator[] fieldComparators = new FieldComparator[parentChildCommand.sort().getSort().length];
+    MockScorer mockScorer = new MockScorer(indexSearcher);
+    final int[] reverse = new int[fieldComparators.length];
+    for (SortField sortField : parentChildCommand.sort().getSort()) {
+      reverse[i] = sortField.getReverse() ? -1 : 1;
+      fieldComparators[i] = sortField.getComparator(parentChildCommand.nDocs() + 1, i);
+      fieldComparators[i] = fieldComparators[i].setNextReader(SlowCompositeReaderWrapper.wrap(indexSearcher.getIndexReader()).getTopReaderContext());
+      fieldComparators[i++].setScorer(mockScorer);
+    }
+    int slot = 0;
+    final int comparatorEnd = fieldComparators.length - 1;
+    int spareCompSlot = parentChildCommand.nDocs();
+
+    List<RandomDoc> randomDocs = context.randomValueDocs.get(query);
+    if (randomDocs == null) {
+      expectedResult.docs = new ParentChildDoc[0];
+      return expectedResult;
+    }
+    expectedResult.hitCount = randomDocs.size();
+    if (randomDocs.isEmpty() || parentChildCommand.nDocs() <= 0) {
+      expectedResult.docs = new ParentChildDoc[0];
+      return expectedResult;
+    }
+
+    TreeSet<ParentChildHit> topParentChildHits = new TreeSet<ParentChildHit>();
+    boolean hitsFull = parentChildCommand.nDocs() == 0;
+    outer:
+    for (RandomDoc randomDoc : randomDocs) {
+      boolean parent = parentDocs ? "parent".equals(randomDoc.type) : "child".equals(randomDoc.type);
+      boolean child = !parent;
+      int doc = getLuceneDoc(indexSearcher, randomDoc.id);
+      mockScorer.nextRandomDoc(randomDoc, doc);
+
+      for (FieldComparator comparator : fieldComparators) {
+        comparator.copy(spareCompSlot, doc);
+      }
+
+      if (seenValues.contains(randomDoc.link)) {
+        ParentChildHit existingParentChildHit;
+        if (parent) {
+          existingParentChildHit = parentLinkHits.get(randomDoc.link);
+        } else if (child) {
+          existingParentChildHit = childLinkHits.get(randomDoc.link);
+        } else {
+          continue;
+        }
+
+        if (existingParentChildHit == null) {
+          continue;
+        }
+
+        for (int compIDX = 0; ; compIDX++) {
+          final int c = reverse[compIDX] * fieldComparators[compIDX].compare(existingParentChildHit.slot, spareCompSlot);
+          if (c < 0) {
+            continue outer;
+          } else if (c > 0) {
+            break;
+          } else if (compIDX == comparatorEnd) {
+            if (topParentChildHits.last().doc < doc) {
+              continue outer;
+            } else {
+              break;
+            }
+          }
+        }
+
+        if (existingParentChildHit.parent) {
+          assert parentLinkHits.remove(existingParentChildHit.value) != null;
+        } else if (existingParentChildHit.child) {
+          assert childLinkHits.remove(existingParentChildHit.value) != null;
+        }
+        assert topParentChildHits.remove(existingParentChildHit);
+
+        existingParentChildHit.doc = doc;
+        existingParentChildHit.score = mockScorer.score();
+        existingParentChildHit.value = randomDoc.link;
+        seenValues.add(existingParentChildHit.value);
+        existingParentChildHit.parent = parent;
+        existingParentChildHit.child = child;
+        for (FieldComparator comparator : fieldComparators) {
+          comparator.copy(existingParentChildHit.slot, doc);
+        }
+        if (parent) {
+          parentLinkHits.put(existingParentChildHit.value, existingParentChildHit);
+        } else if (child) {
+          childLinkHits.put(existingParentChildHit.value, existingParentChildHit);
+        }
+        topParentChildHits.add(existingParentChildHit);
+        continue;
+      }
+
+      final ParentChildHit parentChildHit;
+      if (!hitsFull) {
+        parentChildHit = new ParentChildHit(doc, mockScorer.score(), slot++, randomDoc.link, parent, child, fieldComparators, reverse, comparatorEnd);
+        if (parent) {
+          parentLinkHits.put(parentChildHit.value, parentChildHit);
+        } else if (child) {
+          childLinkHits.put(parentChildHit.value, parentChildHit);
+        }
+      } else {
+        for (int compIDX = 0; ; compIDX++) {
+          final int c = reverse[compIDX] * fieldComparators[compIDX].compareBottom(doc);
+          if (c < 0) {
+            continue outer;
+          } else if (c > 0) {
+            break;
+          } else if (compIDX == comparatorEnd) {
+            if (topParentChildHits.last().doc < doc) {
+              continue outer;
+            } else {
+              break;
+            }
+          }
+        }
+
+        parentChildHit = topParentChildHits.pollLast();
+        if (parentChildHit.parent) {
+          parentLinkHits.remove(parentChildHit.value);
+        } else if (parentChildHit.child) {
+          childLinkHits.remove(parentChildHit.value);
+        }
+
+        parentChildHit.doc = doc;
+        parentChildHit.score = mockScorer.score();
+        parentChildHit.value = randomDoc.link;
+        parentChildHit.parent = parent;
+        parentChildHit.child = child;
+        if (parent) {
+          parentLinkHits.put(parentChildHit.value, parentChildHit);
+        } else if (child) {
+          childLinkHits.put(parentChildHit.value, parentChildHit);
+        }
+      }
+      seenValues.add(parentChildHit.value);
+
+      for (FieldComparator comparator : fieldComparators) {
+        comparator.copy(parentChildHit.slot, doc);
+      }
+      expectedResult.modCount++;
+      topParentChildHits.add(parentChildHit);
+      hitsFull = topParentChildHits.size() >= (parentChildCommand.nDocs());
+      if (hitsFull && !topParentChildHits.isEmpty()) {
+        int lastSlot = topParentChildHits.last().slot;
+        for (FieldComparator comparator : fieldComparators) {
+          comparator.setBottom(lastSlot);
+        }
+      }
+    }
+
+    expectedResult.docs = new ParentChildDoc[topParentChildHits.size()];
+    i = 0;
+    for (ParentChildHit topParentChildHit : topParentChildHits) {
+      expectedResult.linkValues.add(topParentChildHit.value);
+      ParentChildDoc l = expectedResult.docs[i++] = new ParentChildDoc();
+      l.linkValue = topParentChildHit.value;
+      if (topParentChildHit.parent) {
+        l.parentDoc = topParentChildHit;
+        l.childDocs = TopFieldCollector.create(parentChildCommand.childrenSort(), parentChildCommand.childrenNDocs(), true, true, true, false);
+        l.childDocs.setNextReader(SlowCompositeReaderWrapper.wrap(indexSearcher.getIndexReader()).getTopReaderContext());
+        l.childDocs.setScorer(mockScorer);
+        Set<RandomDoc> linkedDocs = new HashSet<RandomDoc>();
+        List<RandomDoc> docs = parentDocs ? context.childDocuments.get(l.linkValue) : context.parentDocuments.get(l.linkValue);
+        if (docs != null) {
+          linkedDocs.addAll(docs);
+        }
+
+        for (RandomDoc randomDoc : linkedDocs) {
+          if (TermQuery.class.isAssignableFrom(parentChildCommand.childrenQuery().getClass()) && !query.equals(randomDoc.queryValue)) {
+            continue;
+          }
+
+          int doc = getLuceneDoc(indexSearcher, randomDoc.id);
+          if (MatchAllDocsQuery.class.isAssignableFrom(parentChildCommand.childrenQuery().getClass())) {
+            mockScorer.setCurrentScore(1.0f);
+          } else {
+            mockScorer.nextRandomDoc(randomDoc, doc);
+          }
+          l.childDocs.collect(doc);
+        }
+      } else {
+        l.childOnly = topParentChildHit;
+      }
+
+      expectedResult.linkValuesToParentChildDoc.put(topParentChildHit.value, l);
+    }
+
+    return expectedResult;
+  }
+
  /**
   * Builds a random index of nDocs parent/child documents plus the bookkeeping needed to
   * compute expected results later.
   * <p/>
   * NOTE: the exact order of {@code random} calls is significant for seed reproducibility —
   * do not reorder.
   *
   * @param nDocs  Number of documents to index
   * @param writer Writer used to add the documents
   * @return the generated context (link values, query values, and per-value doc lists)
   * @throws IOException If indexing fails
   */
  private IndexIterationContext createContext(int nDocs, RandomIndexWriter writer) throws IOException {
    IndexIterationContext context = new IndexIterationContext();
    // Each "link group" i has one unique link value, one query value, and a
    // parent/child direction flag; documents are assigned to groups at random.
    int numRandomValues = nDocs / 2;
    context.randomParent = new boolean[numRandomValues];
    context.randomUniqueLinkValues = new BytesRef[numRandomValues];
    context.randomQueryValues = new BytesRef[numRandomValues];
    Set<String> trackSet = new HashSet<String>();
    for (int i = 0; i < numRandomValues; i++) {
      String uniqueRandomValue;
      do {
        uniqueRandomValue = _TestUtil.randomRealisticUnicodeString(random);
//          uniqueRandomValue = _TestUtil.randomSimpleString(random);
      } while ("".equals(uniqueRandomValue) || trackSet.contains(uniqueRandomValue));
      // Generate unique values and empty strings aren't allowed.
      trackSet.add(uniqueRandomValue);

      context.randomUniqueLinkValues[i] = new BytesRef(uniqueRandomValue);
//      context.randomQueryValues[i] = _TestUtil.randomSimpleString(random);

//      String randomQueryValue;
//      do {
//        randomQueryValue = _TestUtil.randomRealisticUnicodeString(random);
//      } while ("".equals(randomQueryValue));
      // Prefixing with "content " avoids empty query values; uniqueness is not required here.
      context.randomQueryValues[i] = new BytesRef("content " + _TestUtil.randomRealisticUnicodeString(random));

      // ~25% of groups are parent-directed.
      context.randomParent[i] = random.nextInt(4) == 1;
    }

    // All fields are indexed, untokenized, unstored — pure key lookups.
    FieldType fieldType = new FieldType();
    fieldType.setIndexed(true);
    fieldType.setStored(false);
    fieldType.setTokenized(false);
    fieldType.setOmitNorms(true);

    for (int i = 0; i < nDocs; i++) {
      String id = Integer.toString(i);
      int randomI = random.nextInt(context.randomUniqueLinkValues.length);
      BytesRef queryValue = context.randomQueryValues[randomI];
      boolean parent = context.randomParent[randomI];
      BytesRef linkValue = context.randomUniqueLinkValues[randomI];

      Document document = new Document();
      document.add(newField(random, "id", id, fieldType));
      document.add(newField(random, "value", queryValue.utf8ToString(), fieldType));

      RandomDoc doc;
      if (parent) {
        // Parent docs carry the link value in the "parent" field.
        doc = new RandomDoc(id, "parent", linkValue, queryValue);
        document.add(newField(random, "type", "parent", fieldType));
        if (!context.parentDocuments.containsKey(linkValue)) {
          context.parentDocuments.put(linkValue, new ArrayList<RandomDoc>());
        }
        context.parentDocuments.get(linkValue).add(doc);
        document.add(newField(random, "parent", linkValue.utf8ToString(), fieldType));
      } else {
        // Child docs carry the link value in the "child" field.
        doc = new RandomDoc(id, "child", linkValue, queryValue);
        document.add(newField(random, "type", "child", fieldType));
        if (!context.childDocuments.containsKey(linkValue)) {
          context.childDocuments.put(linkValue, new ArrayList<RandomDoc>());
        }
        context.childDocuments.get(linkValue).add(doc);
        document.add(newField(random, "child", linkValue.utf8ToString(), fieldType));
      }

      if (!context.randomValueDocs.containsKey(queryValue)) {
        context.randomValueDocs.put(queryValue, new ArrayList<RandomDoc>());
      }
      context.randomValueDocs.get(queryValue).add(doc);

      writer.addDocument(document);
      // Occasionally commit to create multiple segments.
      if (random.nextInt(10) == 4) {
        writer.commit();
      }
      if (VERBOSE) {
        System.out.println("Added document[" + i + "]: " + document);
      }
    }
    return context;
  }
+
+  private Sort selectRandomSort() {
+    int numberOfSortFields = 1 + random.nextInt(10);
+    SortField[] sortFields = new SortField[numberOfSortFields];
+    for (int i = 0; i < numberOfSortFields; i++) {
+      sortFields[i] = selectRandomSortField(random.nextBoolean());
+    }
+    return new Sort(sortFields);
+  }
+
+  private SortField selectRandomSortField(boolean reverse) {
+    if (random.nextBoolean()) {
+      String field = random.nextBoolean() ? "id" : "value";
+      return new SortField(field, SortField.Type.STRING, reverse);
+    } else if (random.nextBoolean()) {
+      return new SortField(null, SortField.Type.SCORE, reverse);
+    } else {
+      return new SortField(null, SortField.Type.DOC, reverse);
+    }
+  }
+
+  private int getLuceneDoc(IndexSearcher searcher, String id) throws IOException {
+    return searcher.search(new TermQuery(new Term("id", id)), 1).scoreDocs[0].doc;
+  }
+
  // Holder for everything createContext generates; index i across the three
  // arrays describes one randomly generated link group.
  private static class IndexIterationContext {

    // Unique link value of group i.
    BytesRef[] randomUniqueLinkValues;
    // Direction flag of group i (true = parent-directed; see createContext).
    boolean[] randomParent;
    // Query value ("value" field content) of group i.
    BytesRef[] randomQueryValues;

    // Indexed documents keyed by link value, split per document type.
    Map<BytesRef, List<RandomDoc>> parentDocuments = new HashMap<BytesRef, List<RandomDoc>>();
    Map<BytesRef, List<RandomDoc>> childDocuments = new HashMap<BytesRef, List<RandomDoc>>();
    // Indexed documents keyed by query value.
    Map<BytesRef, List<RandomDoc>> randomValueDocs = new HashMap<BytesRef, List<RandomDoc>>();

  }
+
+  private static class RandomDoc {
+
+    final String id;
+    final String type;
+    final BytesRef link;
+    final BytesRef queryValue;
+
+    private RandomDoc(String id, String type, BytesRef link, BytesRef queryValue) {
+      this.id = id;
+      this.type = type;
+      this.link = link;
+      this.queryValue = queryValue;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) return true;
+      if (o == null || getClass() != o.getClass()) return false;
+
+      RandomDoc randomDoc = (RandomDoc) o;
+
+      if (id != null ? !id.equals(randomDoc.id) : randomDoc.id != null) return false;
+
+      return true;
+    }
+
+    @Override
+    public int hashCode() {
+      return id != null ? id.hashCode() : 0;
+    }
+
+    @Override
+    public String toString() {
+      return "RandomDoc{" +
+          "id='" + id + '\'' +
+          ", type='" + type + '\'' +
+          ", links=" + link +
+          ", value='" + queryValue + '\'' +
+          '}';
+    }
+  }
+
  // Scorer stub used to feed FieldComparators and child collectors with the score
  // a real search would have produced for a given document. nextDoc/advance are
  // never-used stubs; only score()/docID() matter.
  private static class MockScorer extends Scorer {

    final IndexSearcher searcher;

    float currentScore;
    int currentDoc;
    RandomDoc currentRandomDoc;

    private MockScorer(IndexSearcher searcher) {
      // No Weight needed for this stub.
      super(null);
      this.searcher = searcher;
    }

    public float score() throws IOException {
      return currentScore;
    }

    public int docID() {
      return currentDoc;
    }

    public int nextDoc() throws IOException {
      // Unused stub.
      return 0;
    }

    public int advance(int target) throws IOException {
      // Unused stub.
      return 0;
    }

    // Overrides the captured score directly (used for MatchAllDocsQuery, which scores 1.0).
    public void setCurrentScore(float currentScore) {
      this.currentScore = currentScore;
    }

    // Captures the real score for `doc` by re-running the term query for the
    // document's query value and grabbing the score at the matching doc id.
    // NOTE(review): this is a full search per document — O(n) per call — fine
    // for a test helper, too slow for anything else.
    public void nextRandomDoc(RandomDoc randomDoc, int doc) throws IOException {
      this.currentDoc = doc;
      this.currentRandomDoc = randomDoc;

      searcher.search(new TermQuery(new Term("value", randomDoc.queryValue)), new Collector() {

        private Scorer scorer;
        private int docBase;

        public void collect(int doc) throws IOException {
          // Compare against the top-level doc id of the doc we are interested in.
          if (currentDoc == (doc + docBase)) {
            currentScore = scorer.score();
          }
        }

        @Override
        public void setNextReader(AtomicReaderContext atomicReaderContext) throws IOException {
          docBase = atomicReaderContext.docBase;
        }

        public void setScorer(Scorer scorer) throws IOException {
          this.scorer = scorer;
        }

        public boolean acceptsDocsOutOfOrder() {
          return true;
        }
      });
    }
  }
+
+}
Index: modules/join/src/java/org/apache/lucene/search/join/ParentChildResult.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- modules/join/src/java/org/apache/lucene/search/join/ParentChildResult.java	(revision )
+++ modules/join/src/java/org/apache/lucene/search/join/ParentChildResult.java	(revision )
@@ -0,0 +1,80 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.util.BytesRef;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Represents a parent child search result.
+ * <p/>
+ * Instances are created and populated by {@link TermTopParentChildCollector}; the
+ * package-private fields are mutated directly by the collector while hits are collected.
+ */
+public class ParentChildResult {
+
+  final int nDocs;            // requested top N (offset + length)
+  final String parentField;   // field holding the parent link values
+  final String childField;    // field holding the child link values
+  final String typeField;     // field identifying a doc as parent or child
+  final BytesRef parentValue; // type field value that marks a parent doc
+  final BytesRef childValue;  // type field value that marks a child doc
+
+  final List<BytesRef> linkValues; // TODO: Change into a Set
+  final Map<BytesRef, ParentChildDoc> linkValuesToParentChildDoc;
+
+  ParentChildDoc[] docs;
+  int hitCount;
+  int modCount;
+
+  ParentChildResult(String parentField, String childField, String typeField, int nDocs, BytesRef parentValue, BytesRef childValue) {
+    this.childField = childField;
+    this.typeField = typeField;
+    this.nDocs = nDocs;
+    this.parentField = parentField;
+    this.parentValue = parentValue;
+    this.childValue = childValue;
+
+    linkValues = new ArrayList<BytesRef>();
+    linkValuesToParentChildDoc = new HashMap<BytesRef, ParentChildDoc>();
+  }
+
+  /**
+   * @return The collected parent / child docs, never {@code null}
+   */
+  public ParentChildDoc[] getLinkedDocs() {
+    if (docs == null) {
+      return new ParentChildDoc[0];
+    }
+
+    return docs;
+  }
+
+  /**
+   * @return The number of hits matching the query
+   */
+  public int hitCount() {
+    return hitCount;
+  }
+
+  /**
+   * @return The number of times a new parent / child relation has been seen.
+   */
+  public int getModCount() {
+    return modCount;
+  }
+}
Index: modules/join/src/java/org/apache/lucene/search/join/ParentChildCommand.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- modules/join/src/java/org/apache/lucene/search/join/ParentChildCommand.java	(revision )
+++ modules/join/src/java/org/apache/lucene/search/join/ParentChildCommand.java	(revision )
@@ -0,0 +1,207 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.util.BytesRef;
+
+/**
+ * A parameter object bundling all settings for executing a parent / child search:
+ * the fields and type values that define the relation, grouping options, sorting and
+ * the number of documents (and children per relation) to retrieve.
+ */
+public class ParentChildCommand {
+
+  private String parentField;
+  private String childField;
+  private String typeField;
+
+  private BytesRef parentValue;
+  private BytesRef childValue;
+
+  private boolean groupParent;
+  private boolean groupChild;
+
+  private int nDocs;
+  private Sort sort;
+  private Sort childrenSort;
+  private Query childrenQuery;
+  private int childrenNDocs;
+  private boolean includeNonRelatedDocuments;
+
+  /**
+   * @return The field where the parent values are located
+   */
+  public String getParentField() {
+    return parentField;
+  }
+
+  /**
+   * @param parentField The field where the parent values are located
+   */
+  public void setParentField(String parentField) {
+    this.parentField = parentField;
+  }
+
+  /**
+   * @return The field where child values are located
+   */
+  public String getChildField() {
+    return childField;
+  }
+
+  /**
+   * @param childField The field where child values are located
+   */
+  public void setChildField(String childField) {
+    this.childField = childField;
+  }
+
+  /**
+   * @return The field that helps identifying a document as parent or child
+   */
+  public String getTypeField() {
+    return typeField;
+  }
+
+  /**
+   * @param typeField The field that helps identifying a document as parent or child
+   */
+  public void setTypeField(String typeField) {
+    this.typeField = typeField;
+  }
+
+  /**
+   * @return The value that marks a document as parent when present in type field.
+   */
+  public BytesRef getParentValue() {
+    return parentValue;
+  }
+
+  /**
+   * @param parentValue The value that marks a document as parent when present in type field
+   */
+  public void setParentValue(BytesRef parentValue) {
+    this.parentValue = parentValue;
+  }
+
+  /**
+   * @return The value that marks a document as child when present in type field.
+   */
+  public BytesRef getChildValue() {
+    return childValue;
+  }
+
+  /**
+   * @param childValue The value that marks a document as child when present in type field
+   */
+  public void setChildValue(BytesRef childValue) {
+    this.childValue = childValue;
+  }
+
+  /**
+   * @return Whether to group all documents belonging to parent child relation if the parent already hit
+   */
+  public boolean groupParent() {
+    return groupParent;
+  }
+
+  /**
+   * @param groupParent Whether to group documents of a relation once the parent has hit
+   */
+  public void setGroupParent(boolean groupParent) {
+    this.groupParent = groupParent;
+  }
+
+  /**
+   * @return Whether to group all documents belonging to parent child relation if one of the childs already hit
+   */
+  public boolean groupChild() {
+    return groupChild;
+  }
+
+  /**
+   * @param groupChild Whether to group documents of a relation once one of its children has hit
+   */
+  public void setGroupChild(boolean groupChild) {
+    this.groupChild = groupChild;
+  }
+
+  /**
+   * @return The top N parent child relations to retrieve (offset + length)
+   */
+  public int nDocs() {
+    return nDocs;
+  }
+
+  /**
+   * @param nDocs The top N parent child relations to retrieve (offset + length)
+   */
+  public void setNDocs(int nDocs) {
+    this.nDocs = nDocs;
+  }
+
+  /**
+   * @return The sort used for the parent child relations
+   */
+  public Sort sort() {
+    return sort;
+  }
+
+  /**
+   * @param sort The sort used for the parent child relations
+   */
+  public void setSort(Sort sort) {
+    this.sort = sort;
+  }
+
+  /**
+   * @return The sort used to sort the children inside a parent child relation
+   */
+  public Sort childrenSort() {
+    return childrenSort;
+  }
+
+  /**
+   * @param childrenSort The sort used to sort the children inside a parent child relation
+   */
+  public void setChildrenSort(Sort childrenSort) {
+    this.childrenSort = childrenSort;
+  }
+
+  /**
+   * @return The query used to gather the children documents belonging to the top N parent child relations.
+   */
+  public Query childrenQuery() {
+    return childrenQuery;
+  }
+
+  /**
+   * @param childrenQuery The query used to gather the children of the top N relations
+   */
+  public void setChildrenQuery(Query childrenQuery) {
+    this.childrenQuery = childrenQuery;
+  }
+
+  /**
+   * @return The number of children to retrieve per top N parent child relations
+   */
+  public int childrenNDocs() {
+    return childrenNDocs;
+  }
+
+  /**
+   * @param childrenNDocs The number of children to retrieve per top N relation
+   */
+  public void setChildrenNDocs(int childrenNDocs) {
+    this.childrenNDocs = childrenNDocs;
+  }
+
+  /**
+   * @return Whether documents not belonging to a parent child relation should be included
+   */
+  public boolean includeNonRelatedDocuments() {
+    return includeNonRelatedDocuments;
+  }
+
+  /**
+   * @param includeNonRelatedDocuments Whether docs outside any relation should be included
+   */
+  public void setIncludeNonRelatedDocuments(boolean includeNonRelatedDocuments) {
+    this.includeNonRelatedDocuments = includeNonRelatedDocuments;
+  }
+
+  @Override
+  public String toString() {
+    return "ParentChildCommand{" +
+            "parentField='" + parentField + '\'' +
+            ", childField='" + childField + '\'' +
+            ", typeField='" + typeField + '\'' +
+            ", parentValue=" + parentValue +
+            ", childValue=" + childValue +
+            ", groupParent=" + groupParent +
+            ", groupChild=" + groupChild +
+            ", nDocs=" + nDocs +
+            ", sort=" + sort +
+            ", childrenSort=" + childrenSort +
+            ", childrenQuery=" + childrenQuery +
+            ", childrenNDocs=" + childrenNDocs +
+            ", includeNonRelatedDocuments=" + includeNonRelatedDocuments +
+            '}';
+  }
+}
Index: modules/join/src/java/org/apache/lucene/search/join/TermTopParentChildCollector.java
IDEA additional info:
Subsystem: com.intellij.openapi.diff.impl.patch.CharsetEP
<+>UTF-8
===================================================================
--- modules/join/src/java/org/apache/lucene/search/join/TermTopParentChildCollector.java	(revision )
+++ modules/join/src/java/org/apache/lucene/search/join/TermTopParentChildCollector.java	(revision )
@@ -0,0 +1,378 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.AtomicReaderContext;
+import org.apache.lucene.search.*;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.FixedBitSet;
+
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Collects the top N documents belonging to a parent child relation that match a query.
+ *
+ * If the group option is active then per parent child relation (pcr) this collector decides which document is the
+ * most relevant. Per relation only the most relevant document is kept.
+ * <p/>
+ * Also this collector decides whether a document belongs to the "parent" or "child" side of a relation based on the
+ * type field.
+ * <p/>
+ * The collector requires you to specify a type value representing the parent side and a type value representing the
+ * child side.
+ * <p/>
+ * All documents are joined by the indexed term based values of a document, hence the name Term.
+ */
+// TODO: Extract common code and make that abstract so that we can make alternative impls (for example docvalues)
+public class TermTopParentChildCollector extends Collector {
+
+  private final int topN;
+  private final FieldComparator<?>[] comparators;
+  private final int[] reversed;     // +1 / -1 multiplier per sort field
+  private final int comparatorEnd;  // index of the last comparator (tie-break boundary)
+  private final int spareCompSlot;  // scratch comparator slot, used by groupHit
+  private final NavigableSet<ParentChildHit> topParentChildHits;
+  private final ParentChildResult parentChildResult;
+  private final String parentField;
+  private final String childField;
+  private final String typeField;
+  private final BytesRef parentValue;
+  private final BytesRef childValue;
+  private final boolean groupParent;
+  private final boolean groupChild;
+  private final boolean includeNonRelatedDocuments;
+  private final BytesRef spare = new BytesRef();
+
+  // Link values of all hits seen so far; kept across segments (ords below are per segment).
+  private final Collection<BytesRef> seenValues;
+  private FixedBitSet seenParentOrds; // per-segment: parent link ords already seen
+  private FixedBitSet seenChildOrds;  // per-segment: child link ords already seen
+
+  // Current best hit per link value, split by side of the relation the hit came from.
+  private final Map<BytesRef, ParentChildHit> parentLinkValueToHits = new HashMap<BytesRef, ParentChildHit>();
+  private final Map<BytesRef, ParentChildHit> childrenLinkValueToHits = new HashMap<BytesRef, ParentChildHit>();
+
+  private int docBase;
+  private FieldCache.DocTermsIndex parentFieldIndex;
+  private FieldCache.DocTermsIndex childFieldIndex;
+  private FieldCache.DocTermsIndex typeFieldIndex;
+  private Scorer scorer;
+  private boolean hitsFull = false; // true once topN hits have been collected
+  private int parentOrd;            // ord of parentValue in the type field (per segment)
+  private int childOrd;             // ord of childValue in the type field (per segment)
+
+  /**
+   * Creates a collector based on the settings in the specified command.
+   *
+   * @param command The fields, type values, sort and topN for this collector
+   * @throws IOException If an I/O related error occurs while creating the comparators
+   */
+  public TermTopParentChildCollector(ParentChildCommand command) throws IOException {
+    this.topN = command.nDocs();
+    this.parentField = command.getParentField();
+    this.childField = command.getChildField();
+    this.typeField = command.getTypeField();
+    this.parentValue = command.getParentValue();
+    this.childValue = command.getChildValue();
+    this.groupParent = command.groupParent();
+    this.groupChild = command.groupChild();
+    this.includeNonRelatedDocuments = command.includeNonRelatedDocuments();
+
+    comparators = new FieldComparator<?>[command.sort().getSort().length];
+    reversed = new int[command.sort().getSort().length];
+    spareCompSlot = topN; // slot topN is reserved as scratch; comparators are sized topN + 1
+    comparatorEnd = comparators.length - 1;
+    int i = 0;
+    for (SortField sortField : command.sort().getSort()) {
+      comparators[i] = sortField.getComparator(topN + 1, i);
+      reversed[i++] = sortField.getReverse() ? -1 : 1;
+    }
+
+    topParentChildHits = new TreeSet<ParentChildHit>();
+    parentChildResult = new ParentChildResult(parentField, childField, typeField, command.nDocs(), parentValue, childValue);
+    seenValues = new HashSet<BytesRef>(command.nDocs());
+  }
+
+  /**
+   * Classifies the doc as parent / child / non-related via the type field, rejects it if it
+   * cannot compete with the current bottom hit, and either merges it into an existing
+   * relation (grouping) or inserts it as a new hit, reusing the evicted bottom hit object.
+   */
+  public void collect(int doc) throws IOException {
+    boolean parent = typeFieldIndex.getOrd(doc) == parentOrd;
+    boolean child = typeFieldIndex.getOrd(doc) == childOrd;
+    parentChildResult.hitCount++;
+
+    if (hitsFull) {
+      // Shortcut: compare against the bottom of the queue before doing any real work.
+      for (int compIDX = 0; ; compIDX++) {
+        final int c = reversed[compIDX] * comparators[compIDX].compareBottom(doc);
+        if (c < 0) {
+          // Definitely not competitive. So don't even bother to continue
+          return;
+        } else if (c > 0) {
+          // Definitely competitive.
+          break;
+        } else if (compIDX == comparatorEnd) {
+          // Docs may not be visited in doc order...
+          if (topParentChildHits.last().doc < (doc + docBase)) {
+            return;
+          } else {
+            break;
+          }
+        }
+      }
+    }
+
+    int ord;
+    if (parent) {
+      ord = parentFieldIndex.getOrd(doc);
+      // If this relation was already collected, at most replace that relation's hit.
+      if (groupParent && seenParentOrds.get(ord)) {
+        groupHit(doc, ord, parent, child);
+        return;
+      }
+    } else if (child) {
+      ord = childFieldIndex.getOrd(doc);
+      if (groupChild && seenChildOrds.get(ord)) {
+        groupHit(doc, ord, parent, child);
+        return;
+      }
+    } else if (includeNonRelatedDocuments) {
+      ord = -1; // doc belongs to no relation; collected on its own
+    } else {
+      return;
+    }
+
+    ParentChildHit parentChildHit;
+    if (hitsFull) {
+      // Queue is full: evict the current bottom hit and reuse its object / comparator slot.
+      parentChildHit = topParentChildHits.pollLast();
+    } else {
+      parentChildHit = new ParentChildHit(comparators, reversed, comparatorEnd, topParentChildHits.size());
+    }
+    parentChildResult.modCount++;
+    updateHit(parentChildHit, doc, ord, parent, child);
+  }
+
+  /**
+   * Handles a doc whose relation already has a hit in the queue: if the new doc is more
+   * competitive than that existing hit, the hit is re-inserted with the new doc's data.
+   */
+  private void groupHit(int doc, int ord, boolean parent, boolean child) throws IOException {
+    ParentChildHit parentChildHit;
+    if (parent) {
+      parentChildHit = parentLinkValueToHits.get(parentFieldIndex.lookup(ord, spare));
+    } else if (child) {
+      parentChildHit = childrenLinkValueToHits.get(childFieldIndex.lookup(ord, spare));
+    } else {
+      return;
+    }
+
+    if (parentChildHit == null) {
+      // Relation was seen before but its hit has since been evicted from the queue.
+      return;
+    }
+
+    // Copy the new doc's sort values into the scratch slot so we can compare it.
+    for (FieldComparator<?> comparator : comparators) {
+      comparator.copy(spareCompSlot, doc);
+    }
+
+    for (int compIDX = 0; ; compIDX++) {
+      final int c = reversed[compIDX] * comparators[compIDX].compare(parentChildHit.slot, spareCompSlot);
+      if (c < 0) {
+        // Definitely not competitive. So don't even bother to continue
+        return;
+      } else if (c > 0) {
+        // Definitely competitive.
+        break;
+      } else if (compIDX == comparatorEnd) {
+        // Docs may not be visited in doc order...
+        if (topParentChildHits.last().doc < (doc + docBase)) {
+          return;
+        } else {
+          break;
+        }
+      }
+    }
+    // Remove before mutating: TreeSet ordering depends on the comparator slot values.
+    topParentChildHits.remove(parentChildHit);
+    updateHit(parentChildHit, doc, ord, parent, child);
+  }
+
+  /**
+   * (Re)initializes the given hit with the new doc's data, rewires the link-value maps and
+   * seen-ord bitsets, re-inserts the hit into the queue, and refreshes the bottom slot.
+   */
+  private void updateHit(ParentChildHit existingParentChildHit, int doc, int ord, boolean parent, boolean child) throws IOException {
+    // Unregister the hit's previous link value before it is overwritten below.
+    if (existingParentChildHit.parent) {
+      parentLinkValueToHits.remove(existingParentChildHit.value);
+    } else if (existingParentChildHit.child) {
+      childrenLinkValueToHits.remove(existingParentChildHit.value);
+    }
+
+    existingParentChildHit.doc = doc + docBase;
+    if (parent) {
+      existingParentChildHit.value = parentFieldIndex.lookup(ord, existingParentChildHit.value);
+    } else if (child) {
+      existingParentChildHit.value = childFieldIndex.lookup(ord, existingParentChildHit.value);
+    }
+    existingParentChildHit.score = scorer.score();
+    existingParentChildHit.parent = parent;
+    existingParentChildHit.child = child;
+
+    if (parent) {
+      parentLinkValueToHits.put(existingParentChildHit.value, existingParentChildHit);
+    } else {
+      // NOTE(review): when includeNonRelatedDocuments is on, docs that are neither parent
+      // nor child also take this branch and end up in the children map — verify intended.
+      childrenLinkValueToHits.put(existingParentChildHit.value, existingParentChildHit);
+    }
+
+    for (FieldComparator<?> comparator : comparators) {
+      comparator.copy(existingParentChildHit.slot, doc);
+    }
+
+    topParentChildHits.add(existingParentChildHit);
+
+    // Mark the link value as seen on both sides, so future parent AND child docs of this
+    // relation are routed through groupHit.
+    seenValues.add(existingParentChildHit.value);
+    if (parent) {
+      seenParentOrds.set(ord);
+      // NOTE(review): the > 0 checks here suggest ord 0 is reserved for "no value" — confirm
+      // against DocTermsIndex semantics.
+      int childOrd = childFieldIndex.binarySearchLookup(existingParentChildHit.value, spare);
+      if (childOrd > 0) {
+        seenChildOrds.set(childOrd);
+      }
+    } else if (child) {
+      seenChildOrds.set(ord);
+      int parentOrd = parentFieldIndex.binarySearchLookup(existingParentChildHit.value, spare);
+      if (parentOrd > 0) {
+        seenParentOrds.set(parentOrd);
+      }
+    }
+
+    if (!hitsFull) {
+      hitsFull = topParentChildHits.size() >= topN;
+    }
+    if (hitsFull) {
+      // Queue (now) full: make the comparators track the new bottom for compareBottom.
+      int lastSlot = topParentChildHits.last().slot;
+      for (FieldComparator<?> comparator : comparators) {
+        comparator.setBottom(lastSlot);
+      }
+    }
+  }
+
+  /**
+   * Materializes the collected hits into the {@link ParentChildResult}: one
+   * {@link ParentChildDoc} per hit, registered under its link value when it has one.
+   */
+  public ParentChildResult getParentChildResult() {
+    parentChildResult.docs = new ParentChildDoc[topParentChildHits.size()];
+    int i = 0;
+    for (ParentChildHit parentChildHit : topParentChildHits) {
+      ParentChildDoc parentChildDoc = new ParentChildDoc();
+      if (parentChildHit.parent || parentChildHit.child) {
+        parentChildResult.linkValuesToParentChildDoc.put(parentChildHit.value, parentChildDoc);
+      }
+      if (parentChildHit.parent) {
+        parentChildDoc.parentDoc = parentChildHit;
+      } else {
+        parentChildDoc.childOnly = parentChildHit;
+      }
+      parentChildDoc.linkValue = parentChildHit.value;
+      parentChildResult.linkValues.add(parentChildHit.value);
+      parentChildResult.docs[i++] = parentChildDoc;
+    }
+    return parentChildResult;
+  }
+
+  public void setScorer(Scorer scorer) throws IOException {
+    this.scorer = scorer;
+    for (FieldComparator<?> comparator : comparators) {
+      comparator.setScorer(scorer);
+    }
+  }
+
+  /**
+   * Switches to the next segment: refreshes the per-segment field caches, re-resolves the
+   * type and link value ords, and rebuilds the seen-ord bitsets from the global seenValues.
+   */
+  public void setNextReader(AtomicReaderContext context) throws IOException {
+    docBase = context.docBase;
+    parentFieldIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), parentField);
+    childFieldIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), childField);
+
+    typeFieldIndex = FieldCache.DEFAULT.getTermsIndex(context.reader(), typeField);
+    parentOrd = typeFieldIndex.binarySearchLookup(parentValue, spare);
+    childOrd = typeFieldIndex.binarySearchLookup(childValue, spare);
+
+    for (int i = 0; i < comparators.length; i++) {
+      comparators[i] = comparators[i].setNextReader(context);
+    }
+
+    // Ords are segment-local, so translate every previously seen link value into this
+    // segment's ord space.
+    seenParentOrds = new FixedBitSet(parentFieldIndex.numOrd());
+    seenChildOrds = new FixedBitSet(childFieldIndex.numOrd());
+    for (BytesRef seenValue : seenValues) {
+      if (seenValue == null) {
+        continue;
+      }
+
+      int ord = parentFieldIndex.binarySearchLookup(seenValue, spare);
+      if (ord > 0) {
+        seenParentOrds.set(ord);
+      }
+
+      ord = childFieldIndex.binarySearchLookup(seenValue, spare);
+      if (ord > 0) {
+        seenChildOrds.set(ord);
+      }
+    }
+  }
+
+  public boolean acceptsDocsOutOfOrder() {
+    return true;
+  }
+}
+
+class ParentChildHit extends ScoreDoc implements Comparable<ParentChildHit> {
+
+  final FieldComparator[] comparators;
+  final int[] reversed;
+  final int comparatorEnd;
+  final int slot;
+
+  BytesRef value;
+  boolean parent;
+  boolean child;
+
+  ParentChildHit(FieldComparator[] comparators, int[] reversed, int comparatorEnd, int slot) {
+    super(-1, Float.NaN);
+    this.comparators = comparators;
+    this.reversed = reversed;
+    this.comparatorEnd = comparatorEnd;
+    this.slot = slot;
+    this.value = new BytesRef();
+  }
+
+  ParentChildHit(int doc, float score, int slot, BytesRef value, boolean parent, boolean child, FieldComparator[] comparators, int[] reversed, int comparatorEnd) {
+    super(doc, score);
+    this.slot = slot;
+    this.value = value;
+    this.parent = parent;
+    this.child = child;
+    this.comparators = comparators;
+    this.reversed = reversed;
+    this.comparatorEnd = comparatorEnd;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    ParentChildHit parentChildHit = (ParentChildHit) o;
+
+    if (slot != parentChildHit.slot) return false;
+
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    return slot;
+  }
+
+  public int compareTo(ParentChildHit otherParentChildHit) {
+    for (int comparatorIndex = 0; ; comparatorIndex++) {
+      FieldComparator<?> fc = comparators[comparatorIndex];
+      final int c = reversed[comparatorIndex] * fc.compare(slot, otherParentChildHit.slot);
+      if (c != 0) {
+        return c;
+      } else if (comparatorIndex == comparatorEnd) {
+        return doc - otherParentChildHit.doc;
+      }
+    }
+  }
+}
