Index: lucene/facet/src/java/org/apache/lucene/facet/associations/SumFloatAssociationFacetsAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/associations/SumFloatAssociationFacetsAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/associations/SumFloatAssociationFacetsAggregator.java	(working copy)
@@ -9,6 +9,7 @@
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.search.OrdinalValueResolver;
 import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.util.BytesRef;
 
@@ -35,7 +36,7 @@
  * the association encoded for each ordinal is {@link CategoryFloatAssociation}.
  * <p>
  * <b>NOTE:</b> this aggregator does not support
- * {@link #rollupValues(FacetRequest, int, int[], int[], FacetArrays)}. It only
+ * {@link #rollupValues(FacetRequest, int, TaxonomyReader, FacetArrays)}. It only
  * aggregates the categories for which you added a {@link CategoryAssociation}.
  * 
  * @lucene.experimental
@@ -80,7 +81,7 @@
   }
 
   @Override
-  public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
+  public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxonomyReader, FacetArrays facetArrays) throws IOException {
     // NO-OP: this aggregator does no rollup values to the parents.
   }
 
Index: lucene/facet/src/java/org/apache/lucene/facet/associations/SumIntAssociationFacetsAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/associations/SumIntAssociationFacetsAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/associations/SumIntAssociationFacetsAggregator.java	(working copy)
@@ -9,6 +9,7 @@
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.search.OrdinalValueResolver;
 import org.apache.lucene.facet.search.OrdinalValueResolver.IntValueResolver;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.util.BytesRef;
 
@@ -36,7 +37,7 @@
  * {@link CategoryIntAssociation}.
  * <p>
  * <b>NOTE:</b> this aggregator does not support
- * {@link #rollupValues(FacetRequest, int, int[], int[], FacetArrays)}. It only
+ * {@link #rollupValues(FacetRequest, int, TaxonomyReader, FacetArrays)}. It only
  * aggregates the categories for which you added a {@link CategoryAssociation}.
  */
 public class SumIntAssociationFacetsAggregator implements FacetsAggregator {
@@ -79,7 +80,7 @@
   }
 
   @Override
-  public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
+  public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxonomyReader, FacetArrays facetArrays) throws IOException {
     // NO-OP: this aggregator does no rollup values to the parents.
   }
 
Index: lucene/facet/src/java/org/apache/lucene/facet/search/DepthOneFacetResultsHandler.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/DepthOneFacetResultsHandler.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/DepthOneFacetResultsHandler.java	(working copy)
@@ -7,7 +7,7 @@
 import java.util.Comparator;
 
 import org.apache.lucene.facet.search.FacetRequest.SortOrder;
-import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.PriorityQueue;
@@ -66,18 +66,20 @@
 
   @Override
   public final FacetResult compute() throws IOException {
-    ParallelTaxonomyArrays arrays = taxonomyReader.getParallelTaxonomyArrays();
-    final int[] children = arrays.children();
-    final int[] siblings = arrays.siblings();
+    int rootOrd = taxonomyReader.getOrdinal(facetRequest.categoryPath);
     
-    int rootOrd = taxonomyReader.getOrdinal(facetRequest.categoryPath);
-        
     FacetResultNode root = new FacetResultNode(rootOrd, resolver.valueOf(rootOrd));
     root.label = facetRequest.categoryPath;
+    
+    ChildrenIterator children = taxonomyReader.getChildren(rootOrd);
+    if (children == null) {
+      return new FacetResult(facetRequest, root, 0);
+    }
+    
     if (facetRequest.numResults > taxonomyReader.getSize()) {
       // specialize this case, user is interested in all available results
       ArrayList<FacetResultNode> nodes = new ArrayList<FacetResultNode>();
-      int ordinal = children[rootOrd];
+      int ordinal = children.next(); // children cannot be null here: we returned early above
       while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
         double value = resolver.valueOf(ordinal);
         if (value > 0) {
@@ -85,7 +87,7 @@
           node.label = taxonomyReader.getPath(ordinal);
           nodes.add(node);
         }
-        ordinal = siblings[ordinal];
+        ordinal = children.next();
       }
 
       CollectionUtil.introSort(nodes, Collections.reverseOrder(new Comparator<FacetResultNode>() {
@@ -101,7 +103,7 @@
     
     // since we use sentinel objects, we cannot reuse PQ. but that's ok because it's not big
     PriorityQueue<FacetResultNode> pq = new FacetResultNodeQueue(facetRequest.numResults, true);
-    int ordinal = children[rootOrd];
+    int ordinal = children.next(); // children cannot be null here: we returned early above
     FacetResultNode top = pq.top();
     int numSiblings = 0;
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
@@ -114,7 +116,7 @@
           top = pq.updateTop();
         }
       }
-      ordinal = siblings[ordinal];
+      ordinal = children.next();
     }
 
     // pop() the least (sentinel) elements
Index: lucene/facet/src/java/org/apache/lucene/facet/search/FacetsAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/FacetsAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/FacetsAggregator.java	(working copy)
@@ -5,6 +5,7 @@
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -38,10 +39,10 @@
   /**
    * Rollup the values of the given ordinal. This method is called when a
    * category was indexed with {@link OrdinalPolicy#NO_PARENTS}. The given
-   * ordinal is the requested category, and you should use the children and
-   * siblings arrays to traverse its sub-tree.
+   * ordinal is the requested category, and you should use
+   * {@link TaxonomyReader#getChildren(int)} to traverse its sub-tree.
    */
-  public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays);
+  public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxoReader, FacetArrays facetArrays) throws IOException;
   
   /** Returns {@code true} if this aggregator requires document scores. */
   public boolean requiresDocScores();
Index: lucene/facet/src/java/org/apache/lucene/facet/search/IntRollupFacetsAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/IntRollupFacetsAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/IntRollupFacetsAggregator.java	(working copy)
@@ -5,6 +5,7 @@
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.search.OrdinalValueResolver.IntValueResolver;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 
 /*
@@ -26,7 +27,7 @@
 
 /**
  * A {@link FacetsAggregator} which implements
- * {@link #rollupValues(FacetRequest, int, int[], int[], FacetArrays)} by
+ * {@link #rollupValues(FacetRequest, int, TaxonomyReader, FacetArrays)} by
  * summing the values from {@link FacetArrays#getIntArray()}. Extending classes
  * should only implement {@link #aggregate}. Also, {@link #requiresDocScores()}
  * always returns false.
@@ -38,22 +39,23 @@
   @Override
   public abstract void aggregate(MatchingDocs matchingDocs, CategoryListParams clp, FacetArrays facetArrays) throws IOException;
   
-  private int rollupValues(int ordinal, int[] children, int[] siblings, int[] values) {
+  private int rollupValues(ChildrenIterator childrenIterator, TaxonomyReader taxoReader, int[] values) throws IOException {
     int value = 0;
+    int ordinal = childrenIterator != null ? childrenIterator.next() : TaxonomyReader.INVALID_ORDINAL;
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
       int childValue = values[ordinal];
-      childValue += rollupValues(children[ordinal], children, siblings, values);
+      childValue += rollupValues(taxoReader.getChildren(ordinal), taxoReader, values);
       values[ordinal] = childValue;
       value += childValue;
-      ordinal = siblings[ordinal];
+      ordinal = childrenIterator.next();
     }
     return value;
   }
 
   @Override
-  public final void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
+  public final void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxoReader, FacetArrays facetArrays) throws IOException {
     final int[] values = facetArrays.getIntArray();
-    values[ordinal] += rollupValues(children[ordinal], children, siblings, values);
+    values[ordinal] += rollupValues(taxoReader.getChildren(ordinal), taxoReader, values);
   }
   
   @Override
Index: lucene/facet/src/java/org/apache/lucene/facet/search/MultiFacetsAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/MultiFacetsAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/MultiFacetsAggregator.java	(working copy)
@@ -7,11 +7,9 @@
 import java.util.Map;
 
 import org.apache.lucene.facet.params.CategoryListParams;
-import org.apache.lucene.facet.search.FacetArrays;
-import org.apache.lucene.facet.search.FacetRequest;
-import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -74,8 +72,8 @@
   }
   
   @Override
-  public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
-    categoryAggregators.get(fr.categoryPath).rollupValues(fr, ordinal, children, siblings, facetArrays);
+  public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxoReader, FacetArrays facetArrays) throws IOException {
+    categoryAggregators.get(fr.categoryPath).rollupValues(fr, ordinal, taxoReader, facetArrays);
   }
   
   @Override
Index: lucene/facet/src/java/org/apache/lucene/facet/search/PerCategoryListAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/PerCategoryListAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/PerCategoryListAggregator.java	(working copy)
@@ -6,6 +6,7 @@
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.params.FacetIndexingParams;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -27,7 +28,7 @@
 /**
  * A {@link FacetsAggregator} which invokes the proper aggregator per
  * {@link CategoryListParams}.
- * {@link #rollupValues(FacetRequest, int, int[], int[], FacetArrays)} is
+ * {@link #rollupValues(FacetRequest, int, TaxonomyReader, FacetArrays)} is
  * delegated to the proper aggregator which handles the
  * {@link CategoryListParams} the given {@link FacetRequest} belongs to.
  */
@@ -47,9 +48,9 @@
   }
   
   @Override
-  public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
+  public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxoReader, FacetArrays facetArrays) throws IOException {
     CategoryListParams clp = fip.getCategoryListParams(fr.categoryPath);
-    aggregators.get(clp).rollupValues(fr, ordinal, children, siblings, facetArrays);
+    aggregators.get(clp).rollupValues(fr, ordinal, taxoReader, facetArrays);
   }
   
   @Override
Index: lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/SumScoreFacetsAggregator.java	(working copy)
@@ -5,6 +5,7 @@
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.util.IntsRef;
 
@@ -55,22 +56,23 @@
     }
   }
   
-  private float rollupScores(int ordinal, int[] children, int[] siblings, float[] scores) {
+  private float rollupScores(ChildrenIterator children, TaxonomyReader taxoReader, float[] scores) throws IOException {
     float score = 0f;
+    int ordinal = children == null ? TaxonomyReader.INVALID_ORDINAL : children.next();
     while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
       float childScore = scores[ordinal];
-      childScore += rollupScores(children[ordinal], children, siblings, scores);
+      childScore += rollupScores(taxoReader.getChildren(ordinal), taxoReader, scores);
       scores[ordinal] = childScore;
       score += childScore;
-      ordinal = siblings[ordinal];
+      ordinal = children.next();
     }
     return score;
   }
 
   @Override
-  public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
+  public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxoReader, FacetArrays facetArrays) throws IOException {
     float[] scores = facetArrays.getFloatArray();
-    scores[ordinal] += rollupScores(children[ordinal], children, siblings, scores);
+    scores[ordinal] += rollupScores(taxoReader.getChildren(ordinal), taxoReader, scores);
   }
   
   @Override
Index: lucene/facet/src/java/org/apache/lucene/facet/search/SumValueSourceFacetRequest.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/SumValueSourceFacetRequest.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/SumValueSourceFacetRequest.java	(working copy)
@@ -11,6 +11,7 @@
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
@@ -54,22 +55,29 @@
       this.valueSource = valueSource;
     }
 
-    private float doRollup(int ordinal, int[] children, int[] siblings, float[] values) {
+    private float doRollup(ChildrenIterator kids, TaxonomyReader taxo, float[] values) throws IOException {
       float value = 0f;
+      int ordinal = kids.next();
       while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
         float childValue = values[ordinal];
-        childValue += doRollup(children[ordinal], children, siblings, values);
+        ChildrenIterator children = taxo.getChildren(ordinal);
+        if (children != null) {
+          childValue += doRollup(children, taxo, values);
+        }
         values[ordinal] = childValue;
         value += childValue;
-        ordinal = siblings[ordinal];
+        ordinal = kids.next();
       }
       return value;
     }
 
     @Override
-    public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
+    public void rollupValues(FacetRequest fr, int ordinal, TaxonomyReader taxoReader, FacetArrays facetArrays) throws IOException {
       float[] values = facetArrays.getFloatArray();
-      values[ordinal] += doRollup(children[ordinal], children, siblings, values);
+      ChildrenIterator children = taxoReader.getChildren(ordinal);
+      if (children != null) {
+        values[ordinal] += doRollup(children, taxoReader, values);
+      }
     }
 
     @Override
Index: lucene/facet/src/java/org/apache/lucene/facet/search/TaxonomyFacetsAccumulator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/TaxonomyFacetsAccumulator.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/TaxonomyFacetsAccumulator.java	(working copy)
@@ -15,7 +15,6 @@
 import org.apache.lucene.facet.search.FacetRequest.SortOrder;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
-import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.index.IndexReader;
 
@@ -183,11 +182,7 @@
       }
     }
     
-    ParallelTaxonomyArrays arrays = taxonomyReader.getParallelTaxonomyArrays();
-    
     // compute top-K
-    final int[] children = arrays.children();
-    final int[] siblings = arrays.siblings();
     List<FacetResult> res = new ArrayList<FacetResult>();
     for (FacetRequest fr : searchParams.facetRequests) {
       int rootOrd = taxonomyReader.getOrdinal(fr.categoryPath);
@@ -201,7 +196,7 @@
         OrdinalPolicy ordinalPolicy = clp.getOrdinalPolicy(fr.categoryPath.components[0]);
         if (ordinalPolicy == OrdinalPolicy.NO_PARENTS) {
           // rollup values
-          aggregator.rollupValues(fr, rootOrd, children, siblings, facetArrays);
+          aggregator.rollupValues(fr, rootOrd, taxonomyReader, facetArrays);
         }
       }
       
Index: lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/TopKFacetResultsHandler.java	(working copy)
@@ -5,7 +5,7 @@
 
 import org.apache.lucene.facet.partitions.IntermediateFacetResult;
 import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
-import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.util.ResultSortUtils;
 
@@ -107,24 +107,23 @@
       int offset) throws IOException {
     int partitionSize = facetArrays.arrayLength;
     int endOffset = offset + partitionSize;
-    ParallelTaxonomyArrays childrenArray = taxonomyReader.getParallelTaxonomyArrays();
-    int[] children = childrenArray.children();
-    int[] siblings = childrenArray.siblings();
     FacetResultNode reusable = null;
     int localDepth = 0;
     int depth = facetRequest.getDepth();
     int[] ordinalStack = new int[2+Math.min(Short.MAX_VALUE, depth)];
     int childrenCounter = 0;
-    
+    ChildrenIterator[] childStack = new ChildrenIterator[ordinalStack.length];
     int tosOrdinal; // top of stack element
     
-    int yc = children[ordinal];
+    ChildrenIterator kids = taxonomyReader.getChildren(ordinal);
+    int yc = kids != null ? kids.next() : TaxonomyReader.INVALID_ORDINAL;
     while (yc >= endOffset) {
-      yc = siblings[yc];
+      yc = kids.next();
     }
     // make use of the fact that TaxonomyReader.INVALID_ORDINAL == -1, < endOffset
     // and it, too, can stop the loop.
     ordinalStack[++localDepth] = yc;
+    childStack[localDepth] = kids;
     
     /*
      * stack holds input parameter ordinal in position 0.
@@ -143,9 +142,12 @@
       if (tosOrdinal == TaxonomyReader.INVALID_ORDINAL) {
         // element below tos has all its children, and itself, all processed
         // need to proceed to its sibling
+        if (localDepth == 1) {
+          break;
+        }
         localDepth--;
         // change element now on top of stack to its sibling.
-        ordinalStack[localDepth] = siblings[ordinalStack[localDepth]];
+        ordinalStack[localDepth] = childStack[localDepth].next();
         continue;
       }
       // top of stack is not invalid, this is the first time we see it on top of stack.
@@ -168,13 +170,16 @@
           reusable = pq.insertWithOverflow(reusable);
         }
       }
+      // push next kid to the stack
       if (localDepth < depth) {
-        // push kid of current tos
-        yc = children[tosOrdinal];
+        kids = taxonomyReader.getChildren(tosOrdinal);
+        yc = kids != null ? kids.next() : TaxonomyReader.INVALID_ORDINAL;
         while (yc >= endOffset) {
-          yc = siblings[yc];
+          yc = kids.next();
         }
+
         ordinalStack[++localDepth] = yc;
+        childStack[localDepth] = kids;
       } else { // localDepth == depth; current tos exhausted its possible children, mark this by pushing INVALID_ORDINAL
         ordinalStack[++localDepth] = TaxonomyReader.INVALID_ORDINAL;
       }
Index: lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/search/TopKInEachNodeHandler.java	(working copy)
@@ -9,7 +9,7 @@
 import org.apache.lucene.facet.partitions.IntermediateFacetResult;
 import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
 import org.apache.lucene.facet.search.FacetRequest.SortOrder;
-import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.util.PriorityQueue;
 
@@ -134,9 +134,6 @@
     }
 
     int endOffset = offset + partitionSize; // one past the largest ordinal in the partition
-    ParallelTaxonomyArrays childrenArray = taxonomyReader.getParallelTaxonomyArrays();
-    int[] children = childrenArray.children();
-    int[] siblings = childrenArray.siblings();
     int totalNumOfDescendantsConsidered = 0; // total number of facets with value != 0, 
     // in the tree. These include those selected as top K in each node, and all the others that
     // were not. Not including rootNode
@@ -198,6 +195,7 @@
     int[][] bestSignlingsStack = new int[depth+2][];
     int[] siblingExplored = new int[depth+2];
     int[] firstToTheLeftOfPartition = new int [depth+2];
+    ChildrenIterator[] childStack = new ChildrenIterator[depth+2];
 
     int tosOrdinal; // top of stack element, the ordinal at the top of stack
 
@@ -210,7 +208,9 @@
      * we can continue to the older sibling of rootNode once the localDepth goes down, before we verify that 
      * it went that down)
      */
-    ordinalStack[++localDepth] = children[rootNode];
+    ChildrenIterator kids = taxonomyReader.getChildren(rootNode);
+    ordinalStack[++localDepth] = kids != null ? kids.next() : TaxonomyReader.INVALID_ORDINAL;
+    childStack[localDepth] = kids;
     siblingExplored[localDepth] = Integer.MAX_VALUE;  // we have not verified position wrt current partition
     siblingExplored[0] = -1; // as if rootNode resides to the left of current position
 
@@ -222,16 +222,22 @@
     while (localDepth > 0) {
       tosOrdinal = ordinalStack[localDepth];
       if (tosOrdinal == TaxonomyReader.INVALID_ORDINAL) {
+        // element below tos has all its children, and itself, all processed
+        // need to proceed to its sibling
+        if (localDepth == 1) {
+          break;
+        }
         // the brotherhood that has been occupying the top of stack is all exhausted.  
         // Hence, element below tos, namely, father of tos, has all its children, 
         // and itself, all explored. 
         localDepth--;
+        
         // replace this father, now on top of stack, by this father's sibling:
         // this parent's ordinal can not be greater than current partition, as otherwise
         // its child, now just removed, would not have been pushed on it.
         // so the father is either inside the partition, or smaller ordinal
         if (siblingExplored[localDepth] < 0 ) {
-          ordinalStack[localDepth] = siblings[ordinalStack[localDepth]];
+          ordinalStack[localDepth] = childStack[localDepth].next();
           continue;
         } 
         // in this point, siblingExplored[localDepth] between 0 and number of bestSiblings
@@ -257,7 +263,7 @@
         //tosOrdinal was not examined yet for its position relative to current partition
         // and the best K of current partition, among its siblings, have not been determined yet
         while (tosOrdinal >= endOffset) {
-          tosOrdinal = siblings[tosOrdinal];
+          tosOrdinal = childStack[localDepth].next();
         }
         // now it is inside. Run it and all its siblings inside the partition through a heap
         // and in doing so, count them, find best K
@@ -284,12 +290,11 @@
               // update totalNumOfDescendants by the now excluded node and all its descendants
               totalNumOfDescendantsConsidered--; // reduce the 1 earned when the excluded node entered the heap
               // and now return it and all its descendants. These will never make it to FacetResult
-              totalNumOfDescendantsConsidered += countOnly (ac.ordinal, children, 
-                  siblings, partitionSize, offset, endOffset, localDepth, depth);
+              totalNumOfDescendantsConsidered += countOnly (ac.ordinal, partitionSize, offset, endOffset, localDepth, depth);
               reusables[++tosReuslables] = ac;
             }
           }
-          tosOrdinal = siblings[tosOrdinal];  
+          tosOrdinal = childStack[localDepth].next();  
         }
         // now pq has best K children of ordinals that belong to the given partition.   
         // Populate a new AACO with them.
@@ -330,7 +335,11 @@
         ordinalStack[++localDepth] = TaxonomyReader.INVALID_ORDINAL;
         continue;
       }
-      ordinalStack[++localDepth] = children[tosOrdinal];
+      
+      kids = taxonomyReader.getChildren(tosOrdinal);
+      ordinalStack[++localDepth] = kids != null ? kids.next() : TaxonomyReader.INVALID_ORDINAL;
+      childStack[localDepth] = kids;
+
       siblingExplored[localDepth] = Integer.MAX_VALUE;
     } // endof loop while stack is not empty
 
@@ -358,9 +367,6 @@
    * as ordinal's descendants might be >= <code>offeset</code>.
    * 
    * @param ordinal a facet ordinal. 
-   * @param youngestChild mapping a given ordinal to its youngest child in the taxonomy (of largest ordinal number),
-   * or to -1 if has no children.  
-   * @param olderSibling  mapping a given ordinal to its older sibling, or to -1
    * @param partitionSize  number of ordinals in the given partition
    * @param offset  the first (smallest) ordinal in the given partition
    * @param endOffset one larger than the largest ordinal that belong to this partition
@@ -370,8 +376,8 @@
    * @return the number of nodes, from ordinal down its descendants, of depth <= maxDepth,
    * which reside in the current partition, and whose value != 0
    */
-  private int countOnly(int ordinal, int[] youngestChild, int[] olderSibling, int partitionSize, int offset, 
-      int endOffset, int currentDepth, int maxDepth) {
+  private int countOnly(int ordinal, int partitionSize, int offset, 
+      int endOffset, int currentDepth, int maxDepth) throws IOException {
     int ret = 0;
     if (offset <= ordinal) {
       // ordinal belongs to the current partition
@@ -384,14 +390,15 @@
       return ret;
     }
 
-    int yc = youngestChild[ordinal];
+    ChildrenIterator kids = taxonomyReader.getChildren(ordinal);
+    int yc = kids != null ? kids.next() : TaxonomyReader.INVALID_ORDINAL;
     while (yc >= endOffset) {
-      yc = olderSibling[yc];
+      yc = kids.next();
     }
     while (yc > TaxonomyReader.INVALID_ORDINAL) { // assuming this is -1, smaller than any legal ordinal
-      ret += countOnly (yc, youngestChild, olderSibling, partitionSize, 
+      ret += countOnly (yc, partitionSize, 
           offset, endOffset, currentDepth+1, maxDepth);
-      yc = olderSibling[yc];
+      yc = kids.next();
     }
     return ret;
   }
Index: lucene/facet/src/java/org/apache/lucene/facet/taxonomy/ChildrenIterator.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/taxonomy/ChildrenIterator.java	(revision 0)
+++ lucene/facet/src/java/org/apache/lucene/facet/taxonomy/ChildrenIterator.java	(working copy)
@@ -0,0 +1,29 @@
+package org.apache.lucene.facet.taxonomy;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** 
+ * An iterator over a category's children. 
+ * @lucene.experimental 
+ * */
+public abstract class ChildrenIterator {
+  
+  /** Return the next child ordinal, or {@link TaxonomyReader#INVALID_ORDINAL} if no more children. */
+  public abstract int next();
+  
+}
Index: lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyReader.java	(working copy)
@@ -65,31 +65,6 @@
  */
 public abstract class TaxonomyReader implements Closeable {
   
-  /** An iterator over a category's children. */
-  public static class ChildrenIterator {
-    
-    private final int[] siblings;
-    private int child;
-    
-    ChildrenIterator(int child, int[] siblings) {
-      this.siblings = siblings;
-      this.child = child;
-    }
-
-    /**
-     * Return the next child ordinal, or {@link TaxonomyReader#INVALID_ORDINAL}
-     * if no more children.
-     */
-    public int next() {
-      int res = child;
-      if (child != TaxonomyReader.INVALID_ORDINAL) {
-        child = siblings[child];
-      }
-      return res;
-    }
-    
-  }
-  
   /**
    * The root category (the category with the empty path) always has the ordinal
    * 0, to which we give a name ROOT_ORDINAL. {@link #getOrdinal(CategoryPath)}
@@ -190,14 +165,10 @@
    * Returns a {@link ParallelTaxonomyArrays} object which can be used to
    * efficiently traverse the taxonomy tree.
    */
-  public abstract ParallelTaxonomyArrays getParallelTaxonomyArrays() throws IOException;
+  protected abstract ParallelTaxonomyArrays getParallelTaxonomyArrays() throws IOException;
   
   /** Returns an iterator over the children of the given ordinal. */
-  public ChildrenIterator getChildren(final int ordinal) throws IOException {
-    ParallelTaxonomyArrays arrays = getParallelTaxonomyArrays();
-    int child = ordinal >= 0 ? arrays.children()[ordinal] : INVALID_ORDINAL;
-    return new ChildrenIterator(child, arrays.siblings());
-  }
+  public abstract ChildrenIterator getChildren(final int ordinal) throws IOException;
   
   /**
    * Retrieve user committed data.
Index: lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java	(working copy)
@@ -1,12 +1,15 @@
 package org.apache.lucene.facet.taxonomy.directory;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.lucene.facet.collections.LRUHashMap;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.index.CorruptIndexException; // javadocs
@@ -227,7 +230,7 @@
   }
 
   @Override
-  public ParallelTaxonomyArrays getParallelTaxonomyArrays() throws IOException {
+  protected ParallelTaxonomyArrays getParallelTaxonomyArrays() throws IOException {
     ensureOpen();
     if (taxoArrays == null) {
       initTaxoArrays();
@@ -382,5 +385,87 @@
     }
     return sb.toString();
   }
+
+  public static class ParallelArraysChildrenIterator extends ChildrenIterator {
+
+    private final int[] siblings;
+    private int child;
+    
+    ParallelArraysChildrenIterator(int child, int[] siblings) {
+      this.siblings = siblings;
+      this.child = child;
+    }
+    
+    @Override
+    public int next() {
+      int res = child;
+      if (child != TaxonomyReader.INVALID_ORDINAL) {
+        child = siblings[child];
+      }
+      return res;
+    }
+  }
+
+  // nocommit ---- 
+  // A quick and dirty map-based ChildrenIterator
+  // it does not support reopen
+  private HashMap<Integer, int[]> children = null;
+  private synchronized void initChildrenMap() throws IOException {
+    if (children == null) {
+      HashMap<Integer,int[]> newChildren = new HashMap<Integer, int[]>();
+      for (int ordinal = 0; ordinal < getSize() ; ++ordinal) {
+        ChildrenIterator taxoChildren = getChildrenOld(ordinal);
+        if (taxoChildren == null) {
+          continue;
+        }
+        ArrayList<Integer> kids = new ArrayList<Integer>();
+        for (int kidOrdinal = taxoChildren.next(); kidOrdinal != TaxonomyReader.INVALID_ORDINAL; kidOrdinal = taxoChildren.next()) {
+          kids.add(kidOrdinal);
+        }
+        int i = 0;
+        int[] intKids = new int[kids.size() + 1];
+        for (Integer integer : kids) {
+          intKids[i++] = integer.intValue();
+        }
+        // Adding last child as invalid. It wastes an int per (real) parent in
+        // memory, but spares an if call for every next(), avoiding AIOOB 
+        intKids[i] = TaxonomyReader.INVALID_ORDINAL;
+        newChildren.put(ordinal, intKids);
+      }
+      children = newChildren;
+    }
+  }
   
+  @Override
+  public ChildrenIterator getChildren(int ordinal) throws IOException {
+    if (children == null) {
+      // init should occur once, and not here. 
+      // and if the map is a viable alternative to PTA, then the map
+      // initialization and update should occur at the same place where PTA is
+      // being initialized and updated
+      initChildrenMap();
+    }
+    final int[] kids = children.get(ordinal);
+    if (kids == null) {
+      return null;
+    }
+    return new ChildrenIterator() {
+      int i = -1;
+      @Override
+      public int next() {
+        return kids[++i];
+      }
+    };
+  }
+  
+  /** Returns an iterator over the children of the given ordinal. */
+  public ChildrenIterator getChildrenOld(final int ordinal) throws IOException {
+    ParallelTaxonomyArrays arrays = getParallelTaxonomyArrays();
+    int child = ordinal >= 0 ? arrays.children()[ordinal] : INVALID_ORDINAL;
+    if (child >= 0) {
+      return new ParallelArraysChildrenIterator(child, arrays.siblings());
+    }
+    return null;
+  }
+
 }
Index: lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java
===================================================================
--- lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java	(revision 1541426)
+++ lucene/facet/src/java/org/apache/lucene/facet/util/PrintTaxonomyStats.java	(working copy)
@@ -22,8 +22,8 @@
 import java.io.PrintStream;
 
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.TaxonomyReader.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
@@ -74,8 +74,12 @@
   }
 
   private static int countAllChildren(TaxonomyReader r, int ord) throws IOException {
+    ChildrenIterator it = r.getChildren(ord);
+    if (it == null) {
+      return 0;
+    }
+
     int count = 0;
-    ChildrenIterator it = r.getChildren(ord);
     int child;
     while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
       count += 1 + countAllChildren(r, child);
@@ -86,9 +90,11 @@
   private static void printAllChildren(PrintStream out, TaxonomyReader r, int ord, String indent, int depth) throws IOException {
     ChildrenIterator it = r.getChildren(ord);
     int child;
-    while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
-      out.println(indent + "/" + r.getPath(child).components[depth]);
-      printAllChildren(out, r, child, indent + "  ", depth+1);
+    if (it != null) { 
+      while ((child = it.next()) != TaxonomyReader.INVALID_ORDINAL) {
+        out.println(indent + "/" + r.getPath(child).components[depth]);
+        printAllChildren(out, r, child, indent + "  ", depth+1);
+      }
     }
   }
 }
Index: lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java
===================================================================
--- lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java	(revision 1541426)
+++ lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyCombined.java	(working copy)
@@ -769,9 +769,9 @@
         setPriority(1 + getPriority());
         try {
           while (!stop.get()) {
-            int lastOrd = tr.getParallelTaxonomyArrays().parents().length - 1;
+            int lastOrd = tr.getSize() - 1;
             assertNotNull("path of last-ord " + lastOrd + " is not found!", tr.getPath(lastOrd));
-            assertChildrenArrays(tr.getParallelTaxonomyArrays(), retry, retrieval[0]++);
+            assertChildrenArrays(tr, retry, retrieval[0]++);
             sleep(10); // don't starve refresh()'s CPU, which sleeps every 50 bytes for 1 ms
           }
         } catch (Throwable e) {
@@ -780,13 +780,15 @@
         }
       }
 
-      private void assertChildrenArrays(ParallelTaxonomyArrays ca, int retry, int retrieval) {
-        final int abYoungChild = ca.children()[abOrd];
+      private void assertChildrenArrays(TaxonomyReader tr, int retry, int retrieval) throws IOException {
+        ChildrenIterator children = tr.getChildren(abOrd);
+        
+        final int abYoungChild = children != null ? children.next() : TaxonomyReader.INVALID_ORDINAL;
         assertTrue(
             "Retry "+retry+": retrieval: "+retrieval+": wrong youngest child for category "+abPath+" (ord="+abOrd+
             ") - must be either "+abYoungChildBase1+" or "+abYoungChildBase2+" but was: "+abYoungChild,
             abYoungChildBase1==abYoungChild ||
-            abYoungChildBase2==ca.children()[abOrd]);
+            abYoungChildBase2==(children != null ? children.next() : TaxonomyReader.INVALID_ORDINAL));
       }
     };
     thread.start();
Index: lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java
===================================================================
--- lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java	(revision 1541426)
+++ lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyReader.java	(working copy)
@@ -9,8 +9,8 @@
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.TaxonomyReader.ChildrenIterator;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -219,7 +219,7 @@
         return conf;
       }
     };
-    TaxonomyReader reader = new DirectoryTaxonomyReader(writer);
+    DirectoryTaxonomyReader reader = new DirectoryTaxonomyReader(writer);
     
     int numRounds = random().nextInt(10) + 10;
     int numCategories = 1; // one for root
@@ -229,7 +229,7 @@
         writer.addCategory(new CategoryPath(Integer.toString(i), Integer.toString(j)));
       }
       numCategories += numCats + 1 /* one for round-parent */;
-      TaxonomyReader newtr = TaxonomyReader.openIfChanged(reader);
+      DirectoryTaxonomyReader newtr = TaxonomyReader.openIfChanged(reader);
       assertNotNull(newtr);
       reader.close();
       reader = newtr;
@@ -270,7 +270,7 @@
       }
     };
     
-    TaxonomyReader reader = new DirectoryTaxonomyReader(writer);
+    DirectoryTaxonomyReader reader = new DirectoryTaxonomyReader(writer);
     assertEquals(1, reader.getSize());
     assertEquals(1, reader.getParallelTaxonomyArrays().parents().length);
 
@@ -280,7 +280,7 @@
     iw.forceMerge(1);
     
     // now calling openIfChanged should trip on the bug
-    TaxonomyReader newtr = TaxonomyReader.openIfChanged(reader);
+    DirectoryTaxonomyReader newtr = TaxonomyReader.openIfChanged(reader);
     assertNotNull(newtr);
     reader.close();
     reader = newtr;
@@ -317,7 +317,7 @@
     // a new segment will be created
     writer.addCategory(new CategoryPath("a"));
     
-    TaxonomyReader reader = new DirectoryTaxonomyReader(writer);
+    DirectoryTaxonomyReader reader = new DirectoryTaxonomyReader(writer);
     assertEquals(2, reader.getSize());
     assertEquals(2, reader.getParallelTaxonomyArrays().parents().length);
 
@@ -325,7 +325,7 @@
     iw.forceMerge(1);
     
     // now calling openIfChanged should trip on the wrong assert in ParetArray's ctor
-    TaxonomyReader newtr = TaxonomyReader.openIfChanged(reader);
+    DirectoryTaxonomyReader newtr = TaxonomyReader.openIfChanged(reader);
     assertNotNull(newtr);
     reader.close();
     reader = newtr;
@@ -493,19 +493,20 @@
 
     // non existing category
     ChildrenIterator it = taxoReader.getChildren(taxoReader.getOrdinal(new CategoryPath("invalid")));
-    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+    assertNull(it);
 
     // a category with no children
     it = taxoReader.getChildren(taxoReader.getOrdinal(new CategoryPath("c")));
-    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+    assertNull(it);
 
     // arbitrary negative ordinal
     it = taxoReader.getChildren(-2);
-    assertEquals(TaxonomyReader.INVALID_ORDINAL, it.next());
+    assertNull(it);
 
     // root's children
     Set<String> roots = new HashSet<String>(Arrays.asList("a", "b", "c"));
     it = taxoReader.getChildren(TaxonomyReader.ROOT_ORDINAL);
+    assertNotNull(it);
     while (!roots.isEmpty()) {
       CategoryPath root = taxoReader.getPath(it.next());
       assertEquals(1, root.length);
