Index: lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
===================================================================
--- lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java	(révision 1430752)
+++ lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java	(copie de travail)
@@ -34,6 +34,7 @@
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Enumeration;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.List;
@@ -111,8 +112,6 @@
     Collections.<Class<?>>addAll(brokenComponents,
       // doesn't actual reset itself!
       CachingTokenFilter.class,
-      // doesn't consume whole stream!
-      LimitTokenCountFilter.class,
      // Not broken: we forcefully add this, so we shouldn't
      // also randomly pick it:
      ValidatingTokenFilter.class,
@@ -138,6 +137,36 @@
     );
   }
 
+  // Simple predicate abstraction (pre-Java-8 codebase, so no java.util.function).
+  // Member interfaces are implicitly static, so the modifier is omitted.
+  private interface Predicate<T> {
+    boolean apply(T o);
+  }
+
+  // Predicate that marks every argument combination as broken.
+  private static final Predicate<Object[]> ALWAYS = new Predicate<Object[]>() {
+    @Override public boolean apply(Object[] args) {
+      return true;
+    }
+  };
+
+  // Constructors that produce broken components for certain (or all) argument
+  // values; newTokenizer/newCharFilterChain/newFilterChain skip these via broken().
+  private static final Map<Constructor<?>,Predicate<Object[]>> brokenConstructors = new HashMap<Constructor<?>, Predicate<Object[]>>();
+  static {
+    try {
+      brokenConstructors.put(
+          LimitTokenCountFilter.class.getConstructor(TokenStream.class, int.class),
+          ALWAYS);
+      brokenConstructors.put(
+          LimitTokenCountFilter.class.getConstructor(TokenStream.class, int.class, boolean.class),
+          new Predicate<Object[]>() {
+            @Override
+            public boolean apply(Object[] args) {
+              assert args.length == 3;
+              return !((Boolean) args[2]); // args are broken if consumeAllTokens is false
+            }
+          });
+    } catch (Exception e) {
+      throw new Error(e);
+    }
+  }
+
   // TODO: also fix these and remove (maybe):
   // Classes that don't produce consistent graph offsets:
   private static final Set<Class<?>> brokenOffsetsComponents = Collections.newSetFromMap(new IdentityHashMap<Class<?>,Boolean>());
@@ -679,7 +708,12 @@
       }
       return null; // no success
     }
-    
+
+    // true if this constructor+args combination is registered as broken above
+    private boolean broken(Constructor<?> ctor, Object[] args) {
+      final Predicate<Object[]> pred = brokenConstructors.get(ctor);
+      return pred != null && pred.apply(args);
+    }
+
     // create a new random tokenizer from classpath
     private TokenizerSpec newTokenizer(Random random, Reader reader) {
       TokenizerSpec spec = new TokenizerSpec();
@@ -688,6 +722,9 @@
         final StringBuilder descr = new StringBuilder();
         final CheckThatYouDidntReadAnythingReaderWrapper wrapper = new CheckThatYouDidntReadAnythingReaderWrapper(reader);
         final Object args[] = newTokenizerArgs(random, wrapper, ctor.getParameterTypes());
+        if (broken(ctor, args)) {
+          continue;
+        }
         spec.tokenizer = createComponent(ctor, args, descr);
         if (spec.tokenizer != null) {
           if (brokenOffsetsComponents.contains(ctor.getDeclaringClass())) {
@@ -710,6 +747,9 @@
         while (true) {
           final Constructor<? extends CharFilter> ctor = charfilters.get(random.nextInt(charfilters.size()));
           final Object args[] = newCharFilterArgs(random, spec.reader, ctor.getParameterTypes());
+          if (broken(ctor, args)) {
+            continue;
+          }
           reader = createComponent(ctor, args, descr);
           if (reader != null) {
             spec.reader = reader;
@@ -746,6 +786,9 @@
           }
           
           final Object args[] = newFilterArgs(random, spec.stream, ctor.getParameterTypes());
+          if (broken(ctor, args)) {
+            continue;
+          }
          final TokenFilter flt = createComponent(ctor, args, descr);
          if (flt != null) {
            if (brokenOffsetsComponents.contains(ctor.getDeclaringClass())) {