Index: modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestLimitTokenCountAnalyzer.java
===================================================================
--- modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestLimitTokenCountAnalyzer.java	(revision 1091052)
+++ modules/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestLimitTokenCountAnalyzer.java	(working copy)
@@ -51,7 +51,7 @@
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new LimitTokenCountAnalyzer(new MockAnalyzer(), 100000)));
+        TEST_VERSION_CURRENT, new LimitTokenCountAnalyzer(new MockAnalyzer(random), 100000)));
     Document doc = new Document();
     StringBuilder b = new StringBuilder();
Index: modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java
===================================================================
--- modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java	(revision 1091052)
+++ modules/analysis/common/src/test/org/apache/lucene/collation/CollationTestBase.java	(working copy)
@@ -186,7 +186,7 @@
                                    String dkResult) throws Exception {
     RAMDirectory indexStore = new RAMDirectory();
     IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // document data:
     // the tracer field is used to determine which document was hit
Index: modules/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java
===================================================================
--- modules/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java	(revision 1091052)
+++ modules/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java	(working copy)
@@ -96,7 +96,7 @@
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
     IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
@@ -183,7 +183,7 @@
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
@@ -222,7 +222,7 @@
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -295,7 +295,7 @@
     assertEquals("TestSearchTask was supposed to be called!",139,CountingSearchTestTask.numSearches);
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("1 docs were added to the index, this is what we expect to find!",1,ir.numDocs());
@@ -425,7 +425,7 @@
     // now we should be able to open the index for write.
     IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     iw.close();
Index: lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
===================================================================
--- lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java	(working copy)
@@ -90,7 +90,7 @@
     doc.add(idField);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new MyMergeScheduler())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new MyMergeScheduler())
         .setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
         .setMergePolicy(newLogMergePolicy()));
     LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy();
Index: lucene/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java
===================================================================
--- lucene/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java	(working copy)
@@ -69,7 +69,7 @@
   public void testSimple() throws Exception {
     String[] fields = {"b", "t"};
-    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer());
+    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(random));
     Query q = mfqp.parse("one");
     assertEquals("b:one t:one", q.toString());
@@ -132,7 +132,7 @@
     boosts.put("b", Float.valueOf(5));
     boosts.put("t", Float.valueOf(10));
     String[] fields = {"b", "t"};
-    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(), boosts);
+    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new MockAnalyzer(random), boosts);
     //Check for simple
@@ -158,24 +158,24 @@
   public void testStaticMethod1() throws ParseException {
     String[] fields = {"b", "t"};
     String[] queries = {"one", "two"};
-    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, new MockAnalyzer());
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, new MockAnalyzer(random));
     assertEquals("b:one t:two", q.toString());
 
     String[] queries2 = {"+one", "+two"};
-    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries2, fields, new MockAnalyzer());
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries2, fields, new MockAnalyzer(random));
     assertEquals("(+b:one) (+t:two)", q.toString());
 
     String[] queries3 = {"one", "+two"};
-    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries3, fields, new MockAnalyzer());
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries3, fields, new MockAnalyzer(random));
     assertEquals("b:one (+t:two)", q.toString());
 
     String[] queries4 = {"one +more", "+two"};
-    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries4, fields, new MockAnalyzer());
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries4, fields, new MockAnalyzer(random));
     assertEquals("(b:one +b:more) (+t:two)", q.toString());
 
     String[] queries5 = {"blah"};
     try {
-      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries5, fields, new MockAnalyzer());
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries5, fields, new MockAnalyzer(random));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -197,15 +197,15 @@
   public void testStaticMethod2() throws ParseException {
     String[] fields = {"b", "t"};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer());
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(random));
     assertEquals("+b:one -t:one", q.toString());
 
-    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer());
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer());
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -217,15 +217,15 @@
     //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer());//, fields, flags, new MockAnalyzer());
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new MockAnalyzer(random));//, fields, flags, new MockAnalyzer(random));
     assertEquals("+b:one -t:one", q.toString());
 
-    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer());
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer());
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -237,12 +237,12 @@
     String[] fields = {"f1", "f2", "f3"};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
-    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer());
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(random));
     assertEquals("+f1:one -f2:two f3:three", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer());
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -253,12 +253,12 @@
     String[] queries = {"one", "two"};
     String[] fields = {"b", "t"};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer());
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new MockAnalyzer(random));
     assertEquals("+b:one -t:two", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer());
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -280,7 +280,7 @@
   }
 
   public void testStopWordSearching() throws Exception {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     Directory ramDir = newDirectory();
     IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
@@ -303,7 +303,7 @@
    * Return empty tokens for field "f1".
    */
   private static class AnalyzerReturningNull extends Analyzer {
-    MockAnalyzer stdAnalyzer = new MockAnalyzer();
+    MockAnalyzer stdAnalyzer = new MockAnalyzer(random);
 
     public AnalyzerReturningNull() { }
Index: lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java
===================================================================
--- lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java	(working copy)
@@ -148,7 +148,7 @@
   public QueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", a);
     qp.setDefaultOperator(QueryParser.OR_OPERATOR);
     return qp;
@@ -218,7 +218,7 @@
   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", a);
     qp.setDefaultOperator(QueryParser.AND_OPERATOR);
     return qp.parse(query);
@@ -339,8 +339,8 @@
   public void testSimple() throws Exception {
     assertQueryEquals("term term term", null, "term term term");
-    assertQueryEquals("türm term term", new MockAnalyzer(), "türm term term");
-    assertQueryEquals("ümlaut", new MockAnalyzer(), "ümlaut");
+    assertQueryEquals("türm term term", new MockAnalyzer(random), "türm term term");
+    assertQueryEquals("ümlaut", new MockAnalyzer(random), "ümlaut");
 
     // FIXME: enhance MockAnalyzer to be able to support this
     // it must no longer extend CharTokenizer
@@ -400,7 +400,7 @@
     assertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
                       "+(title:dog title:cat) -author:\"bob dole\"");
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer());
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random));
     // make sure OR is the default:
     assertEquals(QueryParser.OR_OPERATOR, qp.getDefaultOperator());
     qp.setDefaultOperator(QueryParser.AND_OPERATOR);
@@ -410,7 +410,7 @@
   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -430,7 +430,7 @@
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");
 
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, true);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -558,7 +558,7 @@
     assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)getQuery("[ a TO z]", null)).getRewriteMethod());
 
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.SIMPLE, true));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
     qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
     assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE,((TermRangeQuery)qp.parse("[ a TO z]")).getRewriteMethod());
@@ -618,7 +618,7 @@
     final String defaultField = "default";
     final String monthField = "month";
     final String hourField = "hour";
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.SIMPLE, true));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
 
     // set a field specific date resolution
     qp.setDateResolution(monthField, DateTools.Resolution.MONTH);
@@ -651,7 +651,7 @@
   }
 
   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     /*assertQueryEquals("\\[brackets", a, "\\[brackets");
     assertQueryEquals("\\[brackets", null, "brackets");
@@ -745,7 +745,7 @@
   }
 
   public void testQueryStringEscaping() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
     assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
@@ -831,7 +831,7 @@
   public void testBoost() throws Exception {
     CharacterRunAutomaton stopWords = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", oneStopAnalyzer);
     Query q = qp.parse("on^1.0");
     assertNotNull(q);
@@ -844,7 +844,7 @@
     q = qp.parse("\"on\"^1.0");
     assertNotNull(q);
 
-    QueryParser qp2 = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    QueryParser qp2 = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
     q = qp2.parse("the^3");
     // "the" is a stop word so the result is an empty query:
     assertNotNull(q);
@@ -873,7 +873,7 @@
   public void testCustomQueryParserWildcard() {
     try {
-      new QPTestParser("contents", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("a?t");
+      new QPTestParser("contents", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("a?t");
       fail("Wildcard queries should not be allowed");
     } catch (ParseException expected) {
       // expected exception
@@ -882,7 +882,7 @@
   public void testCustomQueryParserFuzzy() throws Exception {
     try {
-      new QPTestParser("contents", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("xunit~");
+      new QPTestParser("contents", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("xunit~");
       fail("Fuzzy queries should not be allowed");
     } catch (ParseException expected) {
       // expected exception
@@ -892,7 +892,7 @@
   public void testBooleanQuery() throws Exception {
     BooleanQuery.setMaxClauseCount(2);
     try {
-      QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+      QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
       qp.parse("one two three");
       fail("ParseException expected due to too many boolean clauses");
     } catch (ParseException expected) {
@@ -904,7 +904,7 @@
    * This test differs from TestPrecedenceQueryParser
    */
   public void testPrecedence() throws Exception {
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query query1 = qp.parse("A AND B OR C AND D");
     Query query2 = qp.parse("+A +B +C +D");
     assertEquals(query1, query2);
@@ -913,7 +913,7 @@
   // Todo: convert this from DateField to DateUtil
 //  public void testLocalDateFormat() throws IOException, ParseException {
 //    Directory ramDir = newDirectory();
-//    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+//    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
 //    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
 //    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
 //    iw.close();
@@ -940,7 +940,7 @@
   public void testStarParsing() throws Exception {
     final int[] type = new int[1];
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.WHITESPACE, false)) {
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)) {
       @Override
       protected Query getWildcardQuery(String field, String termStr) throws ParseException {
         // override error checking of superclass
@@ -999,13 +999,13 @@
   }
 
   public void testEscapedWildcard() throws Exception {
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
     assertEquals(q, qp.parse("foo\\?ba?r"));
   }
 
   public void testRegexps() throws Exception {
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     RegexpQuery q = new RegexpQuery(new Term("field", "[a-z][123]"));
     assertEquals(q, qp.parse("/[a-z][123]/"));
     qp.setLowercaseExpandedTerms(true);
@@ -1033,7 +1033,7 @@
   public void testStopwords() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true));
     Query result = qp.parse("a:the OR a:foo");
     assertNotNull("result is null and it shouldn't be", result);
     assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
@@ -1049,7 +1049,7 @@
   }
 
   public void testPositionIncrement() throws Exception {
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
     qp.setEnablePositionIncrements(true);
     String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
     //               0         2                      5           7  8
@@ -1066,7 +1066,7 @@
   }
 
   public void testMatchAllDocs() throws Exception {
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     assertEquals(new MatchAllDocsQuery(), qp.parse("*:*"));
     assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)"));
     BooleanQuery bq = (BooleanQuery)qp.parse("+*:* -*:*");
@@ -1075,7 +1075,7 @@
   }
 
   private void assertHits(int expected, String query, IndexSearcher is) throws ParseException, IOException {
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "date", new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "date", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     qp.setLocale(Locale.ENGLISH);
     Query q = qp.parse(query);
     ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
@@ -1093,7 +1093,7 @@
   // "match"
   public void testPositionIncrements() throws Exception {
     Directory dir = newDirectory();
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, a));
     Document doc = new Document();
     doc.add(newField("f", "the wizard of ozzy", Field.Store.NO, Field.Index.ANALYZED));
Index: lucene/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java
===================================================================
--- lucene/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/analysis/TestMockAnalyzer.java	(working copy)
@@ -29,7 +29,7 @@
   /** Test a configuration that behaves a lot like WhitespaceAnalyzer */
   public void testWhitespace() throws Exception {
-    Analyzer a = new MockAnalyzer();
+    Analyzer a = new MockAnalyzer(random);
     assertAnalyzesTo(a, "A bc defg hiJklmn opqrstuv wxy z ",
         new String[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
     assertAnalyzesToReuse(a, "aba cadaba shazam",
@@ -40,7 +40,7 @@
   /** Test a configuration that behaves a lot like SimpleAnalyzer */
   public void testSimple() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     assertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ",
         new String[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
     assertAnalyzesToReuse(a, "aba4cadaba-Shazam",
@@ -51,7 +51,7 @@
   /** Test a configuration that behaves a lot like KeywordAnalyzer */
   public void testKeyword() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.KEYWORD, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
     assertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ",
         new String[] { "a-bc123 defg+hijklmn567opqrstuv78wxy_z " });
     assertAnalyzesToReuse(a, "aba4cadaba-Shazam",
@@ -62,13 +62,13 @@
   /** Test a configuration that behaves a lot like StopAnalyzer */
   public void testStop() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
     assertAnalyzesTo(a, "the quick brown a fox",
         new String[] { "quick", "brown", "fox" },
         new int[] { 2, 1, 2 });
 
     // disable positions
-    a = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
+    a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
     assertAnalyzesTo(a, "the quick brown a fox",
         new String[] { "quick", "brown", "fox" },
         new int[] { 1, 1, 1 });
@@ -81,7 +81,7 @@
         BasicOperations.complement(
             Automaton.union(
                 Arrays.asList(BasicAutomata.makeString("foo"), BasicAutomata.makeString("bar")))));
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true, keepWords, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, keepWords, true);
     assertAnalyzesTo(a, "quick foo brown bar bar fox foo",
         new String[] { "foo", "bar", "bar", "foo" },
        new int[] { 2, 2, 1, 2 });
@@ -90,7 +90,7 @@
   /** Test a configuration that behaves a lot like LengthFilter */
   public void testLength() throws Exception {
     CharacterRunAutomaton length5 = new CharacterRunAutomaton(new RegExp(".{5,}").toAutomaton());
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, true, length5, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, true, length5, true);
     assertAnalyzesTo(a, "ok toolong fine notfine",
         new String[] { "ok", "fine" },
         new int[] { 1, 2 });
Index: lucene/src/test/org/apache/lucene/TestExternalCodecs.java
===================================================================
--- lucene/src/test/org/apache/lucene/TestExternalCodecs.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/TestExternalCodecs.java	(working copy)
@@ -509,7 +509,7 @@
     dir.setCheckIndexOnClose(false); // we use a custom codec provider
     IndexWriter w = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, true)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setCodecProvider(provider).
             setMergePolicy(newLogMergePolicy(3))
     );
Index: lucene/src/test/org/apache/lucene/TestDemo.java
===================================================================
--- lucene/src/test/org/apache/lucene/TestDemo.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/TestDemo.java	(working copy)
@@ -43,13 +43,13 @@
 public class TestDemo extends LuceneTestCase {
 
   public void testDemo() throws IOException, ParseException {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
 
     // Store the index in memory:
     Directory directory = newDirectory();
     // To store an index on disk, use this instead:
     //Directory directory = FSDirectory.open("/tmp/testindex");
-    RandomIndexWriter iwriter = new RandomIndexWriter(random, directory);
+    RandomIndexWriter iwriter = new RandomIndexWriter(random, directory, analyzer);
     iwriter.w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     String longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
Index: lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java	(working copy)
@@ -79,7 +79,7 @@
     int terms = (int) Math.pow(2, bits);
 
     Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy()));
 
     Document doc = new Document();
     Field field = newField("field", "", Field.Store.NO, Field.Index.ANALYZED);
Index: lucene/src/test/org/apache/lucene/search/TestNot.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestNot.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestNot.java	(working copy)
@@ -45,7 +45,7 @@
     IndexReader reader = writer.getReader();
     IndexSearcher searcher = newSearcher(reader);
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer());
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random));
     Query query = parser.parse("a NOT b");
     //System.out.println(query);
     ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
Index: lucene/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java	(working copy)
@@ -89,7 +89,7 @@
     for (int i = 1; i < docText.length; i++) {
       qtxt += ' ' + docText[i]; // large query so that search will be longer
     }
-    QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer());
+    QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(random));
     query = queryParser.parse(qtxt);
 
     // warm the searcher
Index: lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java	(working copy)
@@ -160,7 +160,7 @@
     RandomIndexWriter writer = new RandomIndexWriter(
         random,
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergeScheduler(new SerialMergeScheduler()).
            // asserts below requires no unexpected merges:
            setMergePolicy(newLogMergePolicy(10))
Index: lucene/src/test/org/apache/lucene/search/TestBooleanQuery.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestBooleanQuery.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestBooleanQuery.java	(working copy)
@@ -143,7 +143,7 @@
     IndexReader reader2 = iw2.getReader();
     iw2.close();
 
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer());
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new MockAnalyzer(random));
     qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
 
     MultiReader multireader = new MultiReader(reader1, reader2);
Index: lucene/src/test/org/apache/lucene/search/TestPhraseQuery.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestPhraseQuery.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestPhraseQuery.java	(working copy)
@@ -212,7 +212,7 @@
   public void testPhraseQueryWithStopAnalyzer() throws Exception {
     Directory directory = newDirectory();
-    Analyzer stopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
+    Analyzer stopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
     RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig( Version.LUCENE_40, stopAnalyzer));
     Document doc = new Document();
@@ -285,7 +285,7 @@
     reader.close();
 
     writer = new RandomIndexWriter(random, directory,
-        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     doc = new Document();
     doc.add(newField("contents", "map entry woo", Field.Store.YES, Field.Index.ANALYZED));
     writer.addDocument(doc);
@@ -335,7 +335,7 @@
   public void testSlopScoring() throws IOException {
     Directory directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 
     Document doc = new Document();
     doc.add(newField("field", "foo firstname lastname foo", Field.Store.YES, Field.Index.ANALYZED));
@@ -374,7 +374,7 @@
   }
 
   public void testToString() throws Exception {
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer);
     qp.setEnablePositionIncrements(true);
     PhraseQuery q = (PhraseQuery)qp.parse("\"this hi this is a test is\"");
@@ -596,7 +596,7 @@
   public void testRandomPhrases() throws Exception {
     Directory dir = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
 
     RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));
     List<List<String>> docs = new ArrayList<List<String>>();
Index: lucene/src/test/org/apache/lucene/search/TestPositionIncrement.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestPositionIncrement.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestPositionIncrement.java	(working copy)
@@ -197,7 +197,7 @@
     // should not find "1 2" because there is a gap of 1 in the index
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field",
-        new MockAnalyzer(MockTokenizer.WHITESPACE, false, stopStopList, false, false));
+        new MockAnalyzer(random, MockTokenizer.WHITESPACE, false, stopStopList, false));
     q = (PhraseQuery) qp.parse("\"1 2\"");
     hits = searcher.search(q, null, 1000).scoreDocs;
     assertEquals(0, hits.length);
@@ -221,7 +221,7 @@
     // when both qp and stopFilter propagate increments, we should find the doc.
     qp = new QueryParser(TEST_VERSION_CURRENT, "field",
-        new MockAnalyzer(MockTokenizer.WHITESPACE, false, stopStopList, true, false));
+        new MockAnalyzer(random, MockTokenizer.WHITESPACE, false, stopStopList, true));
     qp.setEnablePositionIncrements(true);
     q = (PhraseQuery) qp.parse("\"1 stop 2\"");
     hits = searcher.search(q, null, 1000).scoreDocs;
Index: lucene/src/test/org/apache/lucene/search/TestTermRangeQuery.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestTermRangeQuery.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestTermRangeQuery.java	(working copy)
@@ -243,7 +243,7 @@
   }
 
   private void initializeIndex(String[] values) throws IOException {
-    initializeIndex(values, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    initializeIndex(values, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
   }
 
   private void initializeIndex(String[] values, Analyzer analyzer) throws IOException {
@@ -255,8 +255,9 @@
     writer.close();
   }
 
+  // shouldn't create an analyzer for every doc?
   private void addDoc(String content) throws IOException {
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
     insertDoc(writer, content);
     writer.close();
   }
Index: lucene/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java	(working copy)
@@ -116,7 +116,7 @@
     query.setSlop(slop);
 
     Directory ramDir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, ramDir, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    RandomIndexWriter writer = new RandomIndexWriter(random, ramDir, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     writer.addDocument(doc);
 
     IndexReader reader = writer.getReader();
Index: lucene/src/test/org/apache/lucene/search/cache/TestEntryCreators.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/cache/TestEntryCreators.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/cache/TestEntryCreators.java	(working copy)
@@ -67,7 +67,7 @@
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer= new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer= new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 
     typeTests = new NumberTypeTester[] {
         new NumberTypeTester( "theRandomByte", "getBytes", ByteValuesCreator.class, ByteParser.class ),
Index: lucene/src/test/org/apache/lucene/search/TestDateSort.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestDateSort.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestDateSort.java	(working copy)
@@ -81,7 +81,7 @@
     Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.STRING, true));
 
-    QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD, new MockAnalyzer());
+    QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD, new MockAnalyzer(random));
     Query query = queryParser.parse("Document");
 
     // Execute the search and process the search results.
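An aside on the "// shouldn't create an analyzer for every doc?" comment the patch adds to TestTermRangeQuery above: a minimal sketch of the alternative it hints at, building one seeded analyzer in setUp() and reusing it across addDoc() calls. This refactor is NOT part of the patch; the `analyzer` field is hypothetical, while `dir`, `insertDoc`, `random`, and the MockAnalyzer/IndexWriterConfig calls all appear in the patch itself.

  // Hypothetical refactor (not in this patch): reuse one seeded MockAnalyzer
  // instead of constructing a fresh one on every addDoc() invocation.
  private Analyzer analyzer;

  @Override
  public void setUp() throws Exception {
    super.setUp();
    analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
  }

  private void addDoc(String content) throws IOException {
    // same writer setup as the patched version, but with the shared analyzer
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, analyzer).setOpenMode(OpenMode.APPEND));
    insertDoc(writer, content);
    writer.close();
  }
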
Index: lucene/src/test/org/apache/lucene/search/TestSort.java
===================================================================
--- lucene/src/test/org/apache/lucene/search/TestSort.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/search/TestSort.java	(working copy)
@@ -111,7 +111,7 @@
       throws IOException {
     Directory indexStore = newDirectory();
     dirs.add(indexStore);
-    RandomIndexWriter writer = new RandomIndexWriter(random, indexStore, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, indexStore, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
    for (int i=0; i processors = new HashMap();
     processors.put(dir, new PerTermPayloadProcessor());
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));
     writer.optimize();
     writer.close();
Index: lucene/src/test/org/apache/lucene/index/TestDoc.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestDoc.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestDoc.java	(working copy)
@@ -114,7 +114,7 @@
     Directory directory = newFSDirectory(indexDir);
     IndexWriter writer = new IndexWriter(
         directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(-1).
             setMergePolicy(newLogMergePolicy(10))
@@ -148,7 +148,7 @@
     directory = newFSDirectory(indexDir);
     writer = new IndexWriter(
         directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setMaxBufferedDocs(-1).
            setMergePolicy(newLogMergePolicy(10))
Index: lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestParallelTermEnum.java	(working copy)
@@ -38,7 +38,7 @@
     super.setUp();
     Document doc;
     rd1 = newDirectory();
-    IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     doc = new Document();
     doc.add(newField("field1", "the quick brown fox jumps", Store.YES,
@@ -50,7 +50,7 @@
     iw1.close();
 
     rd2 = newDirectory();
-    IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     doc = new Document();
     doc.add(newField("field0", "", Store.NO, Index.ANALYZED));
Index: lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexReaderClone.java	(working copy)
@@ -199,7 +199,7 @@
     TestIndexReaderReopen.createIndex(random, dir1, true);
     IndexReader reader1 = IndexReader.open(dir1, false);
     IndexWriter w = new IndexWriter(dir1, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.optimize();
     w.close();
     IndexReader reader2 = reader1.clone(true);
@@ -496,7 +496,7 @@
     final Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(false))
     );
     Document doc = new Document();
Index: lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestSegmentTermEnum.java	(working copy)
@@ -48,7 +48,7 @@
   public void testTermEnum() throws IOException {
     IndexWriter writer = null;
 
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 
     // ADD 100 documents with term : aaa
     // add 100 documents with terms: aaa bbb
@@ -64,7 +64,7 @@
     verifyDocFreq();
 
     // merge segments by optimizing the index
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
@@ -74,7 +74,7 @@
   public void testPrevTermAtEnd() throws IOException
   {
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     addDoc(writer, "aaa bbb");
     writer.close();
     SegmentReader reader = getOnlySegmentReader(IndexReader.open(dir, false));
Index: lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestDocsAndPositions.java	(working copy)
@@ -21,7 +21,6 @@
 import java.util.Arrays;
 
 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;
@@ -34,13 +33,11 @@
 public class TestDocsAndPositions extends LuceneTestCase {
   private String fieldName;
-  private boolean usePayload;
 
   @Override
   public void setUp() throws Exception {
     super.setUp();
     fieldName = "field" + random.nextInt();
-    usePayload = random.nextBoolean();
   }
 
   /**
@@ -49,8 +46,7 @@
   public void testPositionsSimple() throws IOException {
     Directory directory = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < 39; i++) {
       Document doc = new Document();
       doc.add(newField(fieldName, "1 2 3 4 5 6 7 8 9 10 "
@@ -75,7 +71,7 @@
         final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader.maxDoc()));
         do {
           String msg = "Advanced to: " + advance + " current doc: "
-              + docsAndPosEnum.docID() + " usePayloads: " + usePayload;
+              + docsAndPosEnum.docID(); // TODO: + " usePayloads: " + usePayload;
           assertEquals(msg, 4, docsAndPosEnum.freq());
           assertEquals(msg, 0, docsAndPosEnum.nextPosition());
           assertEquals(msg, 4, docsAndPosEnum.freq());
@@ -115,8 +111,7 @@
   public void testRandomPositions() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newLogMergePolicy()));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     int numDocs = 131;
     int max = 1051;
     int term = random.nextInt(max);
@@ -176,8 +171,8 @@
         for (int j = 0; j < howMany; j++) {
           assertEquals("iteration: " + i + " initDoc: " + initDoc + " doc: "
               + docID + " base: " + atomicReaderContext.docBase
-              + " positions: " + Arrays.toString(pos) + " usePayloads: "
-              + usePayload, pos[j].intValue(), docsAndPosEnum.nextPosition());
+              + " positions: " + Arrays.toString(pos) /* TODO: + " usePayloads: "
+              + usePayload*/, pos[j].intValue(), docsAndPosEnum.nextPosition());
         }
 
         if (random.nextInt(10) == 0) { // once is a while advance
@@ -196,8 +191,7 @@
   public void testRandomDocs() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newLogMergePolicy()));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     int numDocs = 499;
     int max = 15678;
     int term = random.nextInt(max);
@@ -275,8 +269,7 @@
   public void testLargeNumberOfPositions() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-            MockTokenizer.WHITESPACE, true, usePayload)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     int howMany = 1000;
     for (int i = 0; i < 39; i++) {
       Document doc = new Document();
@@ -315,8 +308,7 @@
       } else {
         initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
       }
-      String msg = "Iteration: " + i + " initDoc: " + initDoc + " payloads: "
-          + usePayload;
+      String msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
       assertEquals(howMany / 2, docsAndPosEnum.freq());
       for (int j = 0; j < howMany; j += 2) {
         assertEquals("position missmatch index: " + j + " with freq: "
Index: lucene/src/test/org/apache/lucene/index/TestRollback.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestRollback.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestRollback.java	(working copy)
@@ -38,7 +38,7 @@
     rw.close();
 
     // If buffer size is small enough to cause a flush, errors ensue...
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
 
     Term pkTerm = new Term("pk", "");
     for (int i = 0; i < 3; i++) {
Index: lucene/src/test/org/apache/lucene/index/TestIndexReader.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexReader.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexReader.java	(working copy)
@@ -69,7 +69,7 @@
     // set up writer
     IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2));
     for(int i=0;i<27;i++)
       addDocumentWithFields(writer);
@@ -92,7 +92,7 @@
     // Change the index
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(
+        new MockAnalyzer(random)).setOpenMode(
         OpenMode.APPEND).setMaxBufferedDocs(2));
     for(int i=0;i<7;i++)
       addDocumentWithFields(writer);
@@ -104,7 +104,7 @@
     r3.close();
 
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
@@ -119,7 +119,7 @@
   public void testIsCurrent() throws Exception {
     Directory d = newDirectory();
     IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     writer.close();
     // set up reader:
@@ -127,13 +127,13 @@
     assertTrue(reader.isCurrent());
     // modify index by adding another document:
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     addDocumentWithFields(writer);
     writer.close();
     assertFalse(reader.isCurrent());
     // re-create index:
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     addDocumentWithFields(writer);
     writer.close();
     assertFalse(reader.isCurrent());
@@ -150,7 +150,7 @@
     // set up writer
     IndexWriter writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
     );
 
     Document doc = new Document();
@@ -172,7 +172,7 @@
     // add more documents
     writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.APPEND).
             setMergePolicy(newLogMergePolicy())
     );
@@ -271,7 +271,7 @@
     // set up writer
     IndexWriter writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy())
     );
     // want to get some more segments here
@@ -330,7 +330,7 @@
     Term searchTerm = new Term("content", "aaa");
 
     //  add 100 documents with term : aaa
-    writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer  = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.setInfoStream(VERBOSE ? System.out : null);
     for (int i = 0; i < 100; i++) {
       addDoc(writer, searchTerm.text());
@@ -371,7 +371,7 @@
     Directory dir = newDirectory();
     byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
 
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 
     for (int i = 0; i < 10; i++) {
       addDoc(writer, "document number " + (i + 1));
@@ -380,7 +380,7 @@
       addDocumentWithTermVectorFields(writer);
     }
     writer.close();
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
     doc.add(new Field("bin1", bin));
     doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@@ -417,7 +417,7 @@
     // force optimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
     writer.optimize();
     writer.close();
     reader = IndexReader.open(dir, false);
@@ -446,7 +446,7 @@
     Term searchTerm = new Term("content", "aaa");
 
     //  add 11 documents with term : aaa
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < 11; i++) {
       addDoc(writer, searchTerm.text());
     }
@@ -489,7 +489,7 @@
     Term searchTerm = new Term("content", "aaa");
 
     //  add 11 documents with term : aaa
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.commit();
     for (int i = 0; i < 11; i++) {
       addDoc(writer, searchTerm.text());
@@ -532,7 +532,7 @@
     Term searchTerm = new Term("content", "aaa");
 
     //  add 1 documents with term : aaa
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDoc(writer, searchTerm.text());
     writer.close();
@@ -577,7 +577,7 @@
     //  add 1 documents with term : aaa
     writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(false))
     );
     addDoc(writer, searchTerm.text());
@@ -632,7 +632,7 @@
     Term searchTerm2 = new Term("content", "bbb");
     // add 100 documents with term : aaa
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     for (int i = 0; i < 100; i++) {
       addDoc(writer, searchTerm.text());
     }
@@ -647,7 +647,7 @@
     assertTermDocsCount("first reader", reader, searchTerm2, 0);
     // add 100 documents with term : bbb
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     for (int i = 0; i < 100; i++) {
       addDoc(writer, searchTerm2.text());
     }
@@ -708,7 +708,7 @@
     // Create initial data set
     File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
     Directory dir = newFSDirectory(dirFile);
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDoc(writer, "test");
     writer.close();
     dir.close();
@@ -718,7 +718,7 @@
     dir = newFSDirectory(dirFile);
     // Now create the data set again, just as before
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     addDoc(writer, "test");
     writer.close();
     dir.close();
@@ -738,7 +738,7 @@
     for(int i=0;i<2;i++) {
       final Directory dir = newDirectory();
       assertFalse(IndexReader.indexExists(dir));
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       addDocumentWithFields(writer);
       assertTrue(IndexWriter.isLocked(dir));   // writer open, so dir is locked
       writer.close();
@@ -755,7 +755,7 @@
       // incremented:
       Thread.sleep(1000);
-      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+      writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       addDocumentWithFields(writer);
       writer.close();
       reader = IndexReader.open(dir, false);
@@ -768,7 +768,7 @@
   public void testVersion() throws IOException {
     Directory dir = newDirectory();
     assertFalse(IndexReader.indexExists(dir));
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     assertTrue(IndexWriter.isLocked(dir));   // writer open, so dir is locked
     writer.close();
@@ -779,7 +779,7 @@
     reader.close();
     // modify index and check version has been
     // incremented:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     addDocumentWithFields(writer);
     writer.close();
     reader = IndexReader.open(dir, false);
@@ -790,10 +790,10 @@
   public void testLock() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     writer.close();
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     IndexReader reader = IndexReader.open(dir, false);
     try {
       reader.deleteDocument(0);
@@ -814,7 +814,7 @@
   public void testUndeleteAll() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     addDocumentWithFields(writer);
     writer.close();
@@ -831,7 +831,7 @@
   public void testUndeleteAllAfterClose() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     addDocumentWithFields(writer);
     writer.close();
@@ -847,7 +847,7 @@
   public void testUndeleteAllAfterCloseThenReopen() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     addDocumentWithFields(writer);
     writer.close();
@@ -883,7 +883,7 @@
     // First build up a starting index:
     MockDirectoryWrapper startDir = newDirectory();
-    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     if (VERBOSE) {
       System.out.println("TEST: create initial index");
       writer.setInfoStream(System.out);
@@ -1067,7 +1067,7 @@
   public void testDocsOutOfOrderJIRA140() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for(int i=0;i<11;i++) {
       addDoc(writer, "aaa");
     }
@@ -1085,7 +1085,7 @@
     }
     reader.close();
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     // We must add more docs to get a new segment written
     for(int i=0;i<11;i++) {
@@ -1107,7 +1107,7 @@
   public void testExceptionReleaseWriteLockJIRA768() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDoc(writer, "aaa");
     writer.close();
@@ -1163,7 +1163,7 @@
   public void testMultiReaderDeletes() throws Exception {
     Directory dir = newDirectory();
-    RandomIndexWriter w= new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter w= new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
     doc.add(newField("f", "doctor", Field.Store.NO, Field.Index.NOT_ANALYZED));
     w.addDocument(doc);
@@ -1199,7 +1199,7 @@
     // add 100 documents with term : aaa
     // add 100 documents with term : bbb
     // add 100 documents with term : ccc
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     for (int i = 0; i < 100; i++) {
       addDoc(writer, searchTerm1.text());
       addDoc(writer, searchTerm2.text());
@@ -1421,7 +1421,7 @@
     // set up writer
     IndexWriter writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(10))
     );
@@ -1441,7 +1441,7 @@
     // Change the index
     writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(10))
@@ -1456,7 +1456,7 @@
     r2.close();
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
@@ -1472,7 +1472,7 @@
   public void testReadOnly() throws Throwable {
     Directory d = newDirectory();
     IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocumentWithFields(writer);
     writer.commit();
     addDocumentWithFields(writer);
@@ -1488,7 +1488,7 @@
     writer = new IndexWriter(
         d,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
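// [Editor's note] Reopen-for-append pattern used throughout these hunks: close the writer,
// then construct a new one over the same Directory with OpenMode.APPEND. A hedged sketch
// built only from calls visible in this patch:
//
//   writer.close();
//   writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
//       .setOpenMode(OpenMode.APPEND));   // APPEND requires an existing index; CREATE would wipe it
//
// The testIsCurrent hunks above rely on exactly this: any such reopen-and-modify cycle
// makes a previously opened IndexReader report isCurrent() == false.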
            setMergePolicy(newLogMergePolicy(10))
     );
@@ -1509,7 +1509,7 @@
     }
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
@@ -1530,7 +1530,7 @@
     // Make sure write lock isn't held
     writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     writer.close();
@@ -1543,7 +1543,7 @@
   public void testIndexReader() throws Exception {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(createDocument("a"));
     writer.addDocument(createDocument("b"));
     writer.addDocument(createDocument("c"));
@@ -1562,7 +1562,7 @@
     MockDirectoryWrapper dir = newDirectory();
     dir.setPreventDoubleWrite(false);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(createDocument("a"));
     writer.addDocument(createDocument("b"));
     writer.addDocument(createDocument("c"));
@@ -1604,7 +1604,7 @@
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2));
     writer.addDocument(createDocument("a"));
     writer.addDocument(createDocument("a"));
@@ -1628,7 +1628,7 @@
   // reuse the doc values arrays in FieldCache
   public void testFieldCacheReuseAfterClone() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
     writer.addDocument(doc);
@@ -1661,7 +1661,7 @@
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(10))
     );
     Document doc = new Document();
@@ -1697,7 +1697,7 @@
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(-1).
            setMergePolicy(newLogMergePolicy(10))
     );
@@ -1741,7 +1741,7 @@
   // LUCENE-1586: getUniqueTermCount
   public void testUniqueTermCount() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     Document doc = new Document();
     doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
     doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1774,7 +1774,7 @@
   // LUCENE-1609: don't load terms index
   public void testNoTermsIndex() throws Throwable {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     Document doc = new Document();
     doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
     doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1793,7 +1793,7 @@
     assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
     writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setCodecProvider(_TestUtil.alwaysCodec("Standard")).
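// [Editor's note] testNoTermsIndex above pins the codec with _TestUtil.alwaysCodec("Standard")
// and then asserts getTermInfosIndexDivisor() == -1, i.e. the reader was opened without
// loading the terms index. A sketch of the reader side, assuming the 3.x/4.0-era
// IndexReader.open overload that takes a termInfosIndexDivisor (an assumption on our part;
// only the assertion itself appears in this patch):
//
//   IndexReader r = IndexReader.open(dir, null, true, -1);  // -1: don't load the terms index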
            setMergePolicy(newLogMergePolicy(10))
     );
@@ -1821,7 +1821,7 @@
   public void testPrepareCommitIsCurrent() throws Throwable {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.commit();
     Document doc = new Document();
     writer.addDocument(doc);
@@ -1866,7 +1866,7 @@
   // LUCENE-2812
   public void testIndexExists() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.addDocument(new Document());
     writer.prepareCommit();
     assertFalse(IndexReader.indexExists(dir));
@@ -1879,7 +1879,7 @@
   // dict cache
   public void testTotalTermFreqCached() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d = new Document();
     d.add(newField("f", "a a b", Field.Index.ANALYZED));
     writer.addDocument(d);
@@ -1901,7 +1901,7 @@
   // LUCENE-2474
   public void testReaderFinishedListener() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
     writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addDocument(new Document());
Index: lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java	(working copy)
@@ -148,7 +148,7 @@
   public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map docs = new HashMap();
     IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
         0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()));
     w.setInfoStream(VERBOSE ? System.out : null);
     w.commit();
@@ -204,7 +204,7 @@
       System.out.println("TEST: iter=" + iter);
     }
     IndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE)
         .setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setMaxThreadStates(maxThreadStates)
         .setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()));
     w.setInfoStream(VERBOSE ? System.out : null);
@@ -248,7 +248,7 @@
   public static void indexSerial(Random random, Map docs, Directory dir) throws IOException {
-    IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     // index all docs in a single thread
     Iterator iter = docs.values().iterator();
Index: lucene/src/test/org/apache/lucene/index/TestFlex.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestFlex.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestFlex.java	(working copy)
@@ -32,7 +32,7 @@
     IndexWriter w = new IndexWriter(
         d,
-        new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer()).
+        new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random)).
            setMaxBufferedDocs(7)
     );
@@ -64,7 +64,7 @@
   public void testTermOrd() throws Exception {
     Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+        new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
     Document doc = new Document();
     doc.add(newField("f", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     w.addDocument(doc);
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java	(working copy)
@@ -58,7 +58,7 @@
       }
       MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
       dir.setMaxSizeInBytes(diskFree);
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
      writer.setInfoStream(VERBOSE ? System.out : null);
      MergeScheduler ms = writer.getConfig().getMergeScheduler();
      if (ms instanceof ConcurrentMergeScheduler) {
@@ -152,7 +152,7 @@
     long inputDiskUsage = 0;
     for(int i=0;i listCommits = IndexReader.listCommits(dir);
     assertEquals(2, listCommits.size());
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setIndexDeletionPolicy(
+        new MockAnalyzer(random)).setIndexDeletionPolicy(
         new KeepAllDeletionPolicy()).setIndexCommit(listCommits.get(0)));
     d = new Document();
@@ -247,7 +247,7 @@
     assertFNXFiles(dir, "1.fnx", "2.fnx", "3.fnx");
     writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()));
+        new MockAnalyzer(random)));
     writer.commit();
     listCommits = IndexReader.listCommits(dir);
     assertEquals(1, listCommits.size());
@@ -290,9 +290,9 @@
     }
     Directory base = buildRandomIndex(fieldNames.toArray(new String[0]), 20 + random.nextInt(100),
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     FieldNumberBiMap globalFieldMap = writer.segmentInfos
         .getOrLoadGlobalFieldNumberMap(base);
     Set> entries = globalFieldMap.entries();
@@ -315,7 +315,7 @@
     Directory base = newDirectory();
     IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     for (String string : fieldNames) {
       doc.add(newField(string,
@@ -339,9 +339,9 @@
     for (int j = 0; j < numIndexes; j++) {
       Directory toAdd = buildRandomIndex(fieldNames.toArray(new String[0]), 1 + random.nextInt(50),
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       if (random.nextBoolean()) {
         IndexReader open = IndexReader.open(toAdd);
         w.addIndexes(open);
@@ -357,7 +357,7 @@
       toAdd.close();
     }
     IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
         new LogByteSizeMergePolicy()));
     w.optimize();
     w.close();
@@ -402,7 +402,7 @@
     }
     Directory base = newDirectory();
     IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
         NoMergePolicy.NO_COMPOUND_FILES));
     SortedMap copySortedMap = new TreeMap(
@@ -428,7 +428,7 @@
     writer.close();
     writer = new IndexWriter(base, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
     writer.commit(); // make sure the old index is the latest segment
     writer.close();
@@ -459,7 +459,7 @@
       .unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
      dir = newFSDirectory(oldIndxeDir);
      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(policy));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(policy));
      SegmentInfos segmentInfos = writer.segmentInfos;
      assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
      assertEquals(0, segmentInfos.getGlobalFieldMapVersion());
Index: lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java	(working copy)
@@ -36,7 +36,7 @@
     super.setUp();
     dir = newDirectory();
     DocHelper.setupDoc(testDoc);
-    info = DocHelper.writeDoc(dir, testDoc);
+    info = DocHelper.writeDoc(random, dir, testDoc);
   }
   @Override
@@ -105,7 +105,7 @@
   public void testSkipTo(int indexDivisor) throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     Term ta = new Term("content","aaa");
     for(int i = 0; i < 10; i++)
@@ -251,7 +251,7 @@
   public void testIndexDivisor() throws IOException {
     testDoc = new Document();
     DocHelper.setupDoc(testDoc);
-    DocHelper.writeDoc(dir, testDoc);
+    DocHelper.writeDoc(random, dir, testDoc);
     testTermDocs(2);
     testBadSeek(2);
     testSkipTo(2);
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexWriter.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriter.java	(working copy)
@@ -86,7 +86,7 @@
     try {
       IndexWriterConfig.setDefaultWriteLockTimeout(2000);
       assertEquals(2000, IndexWriterConfig.getDefaultWriteLockTimeout());
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     } finally {
       IndexWriterConfig.setDefaultWriteLockTimeout(savedWriteLockTimeout);
     }
@@ -110,7 +110,7 @@
     reader.close();
     // optimize the index and check that the new doc count is correct
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     assertEquals(60, writer.numDocs());
     writer.optimize();
     assertEquals(60, writer.maxDoc());
@@ -125,7 +125,7 @@
     // make sure opening a new index for create over
     // this existing one works correctly:
-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     assertEquals(0, writer.maxDoc());
     assertEquals(0, writer.numDocs());
     writer.close();
@@ -153,7 +153,7 @@
     String[] startFiles = dir.listAll();
     SegmentInfos infos = new SegmentInfos();
     infos.read(dir);
-    new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).rollback();
+    new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).rollback();
     String[] endFiles = dir.listAll();
     Arrays.sort(startFiles);
@@ -176,7 +176,7 @@
     ldmp.setMinMergeDocs(1);
     ldmp.setMergeFactor(5);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy(
         ldmp));
     for(int j=0;j data = new HashMap();
@@ -2008,7 +2008,7 @@
     assertEquals("test1", r.getCommitUserData().get("label"));
     r.close();
-    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.optimize();
     w.close();
@@ -2021,7 +2021,7 @@
   // LUCENE-2529
   public void testPositionIncrementGapEmptyField() throws Exception {
     Directory dir = newDirectory();
-    MockAnalyzer analyzer = new MockAnalyzer();
+    MockAnalyzer analyzer = new MockAnalyzer(random);
     analyzer.setPositionIncrementGap( 100 );
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer));
@@ -2059,7 +2059,7 @@
       out.writeByte((byte) 42);
       out.close();
-      new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+      new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
       assertTrue(dir.fileExists("myrandomfile"));
     } finally {
@@ -2069,7 +2069,7 @@
   public void testDeadlock() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
                      Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -2080,7 +2080,7 @@
     // index has 2 segments
     Directory dir2 = newDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer2.addDocument(doc);
     writer2.close();
@@ -2119,7 +2119,7 @@
         w.close();
       }
       IndexWriterConfig conf = newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2);
       w = new IndexWriter(dir, conf);
       Document doc = new Document();
@@ -2224,7 +2224,7 @@
   public void testIndexStoreCombos() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     byte[] b = new byte[50];
     for(int i=0;i<50;i++)
       b[i] = (byte) (i+77);
@@ -2287,7 +2287,7 @@
   // LUCENE-1727: make sure doc fields are stored in order
   public void testStoredFieldsOrder() throws Throwable {
     Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("zzz", "a b c", Field.Store.YES, Field.Index.NO));
     doc.add(newField("aaa", "a b c", Field.Store.YES, Field.Index.NO));
@@ -2319,7 +2319,7 @@
   public void testEmbeddedFFFF() throws Throwable {
     Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
     w.addDocument(doc);
@@ -2337,7 +2337,7 @@
   public void testNoDocsIndex() throws Throwable {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
     writer.setInfoStream(new PrintStream(bos));
     writer.addDocument(new Document());
@@ -2355,7 +2355,7 @@
     final double RUN_SEC = 0.5;
     final Directory dir = newDirectory();
     final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     _TestUtil.reduceOpenFiles(w.w);
     w.commit();
     final AtomicBoolean failed = new AtomicBoolean();
@@ -2527,7 +2527,7 @@
   public void testIndexDivisor() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     config.setTermIndexInterval(2);
     IndexWriter w = new IndexWriter(dir, config);
     StringBuilder s = new StringBuilder();
@@ -2564,7 +2564,7 @@
     IndexWriter w = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(mergePolicy)
     );
     Document doc = new Document();
@@ -2626,7 +2626,7 @@
     Directory dir = newDirectory();
     SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setIndexDeletionPolicy(sdp));
     // First commit
@@ -2670,7 +2670,14 @@
     final Random r = random;
     Directory dir = newDirectory();
-    FlushCountingIndexWriter w = new FlushCountingIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, false)).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(-1).setMaxBufferedDeleteTerms(-1));
+    // note this test explicitly disables payloads
+    final Analyzer analyzer = new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+      }
+    };
+    FlushCountingIndexWriter w = new FlushCountingIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(-1).setMaxBufferedDeleteTerms(-1));
     w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO, Field.Index.ANALYZED));
@@ -2708,7 +2715,7 @@
     // changed since LUCENE-2386, where before IW would always commit on a fresh
     // new index.
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     try {
       IndexReader.listCommits(dir);
       fail("listCommits should have thrown an exception over empty index");
@@ -2726,7 +2733,7 @@
     // then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
     // when listAll() was called in IndexFileDeleter.
     Directory dir = newFSDirectory(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
-    new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     dir.close();
   }
@@ -2736,7 +2743,7 @@
     // files are left in the Directory.
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
     String[] files = dir.listAll();
@@ -2781,14 +2788,14 @@
     Directory dir = newDirectory();
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     w.addDocument(doc);
     w.addDocument(doc);
     IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2)
         .setOpenMode(OpenMode.CREATE));
     w2.close();
@@ -2800,7 +2807,7 @@
   public void testFutureCommit() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
     Document doc = new Document();
     w.addDocument(doc);
@@ -2826,7 +2833,7 @@
     assertNotNull(commit);
-    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
+    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
     assertEquals(1, w.numDocs());
@@ -2876,7 +2883,7 @@
   public void testRandomStoredFields() throws IOException {
     Directory dir = newDirectory();
     Random rand = random;
-    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
+    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
     //w.w.setInfoStream(System.out);
     //w.w.setUseCompoundFile(false);
     if (VERBOSE) {
@@ -2972,7 +2979,7 @@
   public void testNoUnwantedTVFiles() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
+    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
     ((LogMergePolicy) indexWriter.getConfig().getMergePolicy()).setUseCompoundFile(false);
     String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
Index: lucene/src/test/org/apache/lucene/index/TestCheckIndex.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestCheckIndex.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestCheckIndex.java	(working copy)
@@ -34,7 +34,7 @@
   public void testDeletedDocs() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<19;i++) {
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java	(working copy)
@@ -155,7 +155,7 @@
     }
     MockDirectoryWrapper dir = newDirectory();
-    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);
@@ -201,7 +201,7 @@
   public void testRandomExceptionsThreads() throws Throwable {
     MockDirectoryWrapper dir = newDirectory();
-    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);
@@ -289,7 +289,7 @@
   public void testExceptionDocumentsWriterInit() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "a field", Field.Store.YES,
@@ -310,7 +310,7 @@
   // LUCENE-1208
   public void testExceptionJustBeforeFlush() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "a field", Field.Store.YES,
@@ -361,7 +361,7 @@
   // LUCENE-1210
   public void testExceptionOnMergeInit() throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
     MockIndexWriter3 w = new MockIndexWriter3(dir, conf);
@@ -494,7 +494,7 @@
     failure.setDoFail();
     dir.failOn(failure);
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     String contents = "aa bb cc dd ee ff gg hh ii jj kk";
     doc.add(newField("content", contents, Field.Store.NO,
@@ -765,7 +765,7 @@
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergeScheduler(new ConcurrentMergeScheduler()).
            setMergePolicy(newLogMergePolicy(5))
@@ -847,7 +847,7 @@
     for (FailOnlyInCommit failure : failures) {
       MockDirectoryWrapper dir = newDirectory();
       IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document doc = new Document();
       doc.add(newField("field", "a field", Field.Store.YES,
                        Field.Index.ANALYZED));
@@ -872,7 +872,7 @@
   public void testOptimizeExceptions() throws IOException {
     Directory startDir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
     IndexWriter w = new IndexWriter(startDir, conf);
     for(int i=0;i<27;i++)
@@ -884,7 +884,7 @@
         System.out.println("TEST: iter " + i);
       }
       MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
-      conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler());
+      conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler());
      ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
      w = new IndexWriter(dir, conf);
      w.setInfoStream(VERBOSE ? System.out : null);
@@ -908,7 +908,7 @@
     final List thrown = new ArrayList();
     final Directory dir = newDirectory();
     final IndexWriter writer = new IndexWriter(dir,
-        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())) {
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))) {
         @Override
        public void message(final String message) {
          if (message.startsWith("now flush at close") && 0 == thrown.size()) {
@@ -951,7 +951,7 @@
   // LUCENE-1347
   public void testRollbackExceptionHang() throws Throwable {
     Directory dir = newDirectory();
-    MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDoc(w);
     w.doFail = true;
@@ -973,7 +973,7 @@
       IndexWriter writer = null;
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       // add 100 documents
       for (int i = 0; i < 100; i++) {
@@ -1015,7 +1015,7 @@
       IndexWriter writer = null;
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       // add 100 documents
       for (int i = 0; i < 100; i++) {
@@ -1064,7 +1064,7 @@
       writer = new IndexWriter(
           dir,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setMergePolicy(newLogMergePolicy(true))
       );
       ((LogMergePolicy) writer.getConfig().getMergePolicy()).setNoCFSRatio(1.0);
@@ -1113,7 +1113,7 @@
       IndexWriter writer = null;
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       // add 100 documents
       for (int i = 0; i < 100; i++) {
@@ -1151,7 +1151,7 @@
       reader.close();
       try {
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       } catch (Exception e) {
         e.printStackTrace(System.out);
         fail("writer failed to open on a crashed index");
Index: lucene/src/test/org/apache/lucene/index/TestTransactions.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestTransactions.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestTransactions.java	(working copy)
@@ -93,7 +93,7 @@
       IndexWriter writer1 = new IndexWriter(
           dir1,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setMaxBufferedDocs(3).
              setMergeScheduler(new ConcurrentMergeScheduler()).
              setMergePolicy(newLogMergePolicy(2))
@@ -104,7 +104,7 @@
       // happen @ different times
       IndexWriter writer2 = new IndexWriter(
           dir2,
-          newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setMaxBufferedDocs(2).
              setMergeScheduler(new ConcurrentMergeScheduler()).
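// [Editor's note] Several hunks above pair a ConcurrentMergeScheduler with
// ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions(), so that
// the exceptions these tests provoke on purpose don't escape the background merge threads.
// Sketch of the pairing, using only calls visible in this patch:
//
//   IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
//       .setMergeScheduler(new ConcurrentMergeScheduler());
//   ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();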
             setMergePolicy(newLogMergePolicy(3))
@@ -189,7 +189,7 @@
   }
   public void initIndex(Directory dir) throws Throwable {
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for(int j=0; j<7; j++) {
       Document d = new Document();
       int n = random.nextInt();
Index: lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java	(working copy)
@@ -39,7 +39,7 @@
   public void testDoubleOffsetCounting() throws Exception {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     Field f = newField("field", "abcd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
     doc.add(f);
@@ -74,7 +74,7 @@
   // LUCENE-1442
   public void testDoubleOffsetCounting2() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     Field f = newField("field", "abcd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
     doc.add(f);
@@ -96,7 +96,7 @@
   // LUCENE-1448
   public void testEndOffsetPositionCharAnalyzer() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     Field f = newField("field", "abcd   ", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
     doc.add(f);
@@ -118,7 +118,7 @@
   // LUCENE-1448
   public void testEndOffsetPositionWithCachingTokenFilter() throws Exception {
     Directory dir = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
     TokenStream stream = new CachingTokenFilter(analyzer.tokenStream("field", new StringReader("abcd   ")));
@@ -143,7 +143,7 @@
   public void testEndOffsetPositionStopFilter() throws Exception {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
     Document doc = new Document();
     Field f = newField("field", "abcd the", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
     doc.add(f);
@@ -166,7 +166,7 @@
   public void testEndOffsetPositionStandard() throws Exception {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     Field f = newField("field", "abcd the   ",
                       Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -197,7 +197,7 @@
   public void testEndOffsetPositionStandardEmptyField() throws Exception {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     Field f = newField("field", "", Field.Store.NO,
                        Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -225,7 +225,7 @@
   public void testEndOffsetPositionStandardEmptyField2() throws Exception {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     Field f = newField("field", "abcd", Field.Store.NO,
@@ -259,7 +259,7 @@
     Directory dir = newDirectory();
     for(int iter=0;iter<2;iter++) {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
          .setMaxBufferedDocs(2).setRAMBufferSizeMB(
          IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler(
          new SerialMergeScheduler()).setMergePolicy(
@@ -292,7 +292,7 @@
       reader.close();
       writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setMaxBufferedDocs(2)
+          new MockAnalyzer(random)).setMaxBufferedDocs(2)
          .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
          .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(
          new LogDocMergePolicy()));
@@ -310,7 +310,7 @@
     Directory dir = newDirectory();
     for(int iter=0;iter<2;iter++) {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
          .setMaxBufferedDocs(2).setRAMBufferSizeMB(
          IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler(
          new SerialMergeScheduler()).setMergePolicy(
@@ -347,7 +347,7 @@
   public void testTermVectorCorruption3() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setMaxBufferedDocs(2).setRAMBufferSizeMB(
        IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler(
        new SerialMergeScheduler()).setMergePolicy(new LogDocMergePolicy()));
@@ -367,7 +367,7 @@
     writer.close();
     writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setMaxBufferedDocs(2)
+        new MockAnalyzer(random)).setMaxBufferedDocs(2)
        .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(
        new LogDocMergePolicy()));
@@ -390,7 +390,7 @@
   public void testNoTermVectorAfterTermVector() throws IOException {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document document = new Document();
     document.add(newField("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
        Field.TermVector.YES));
@@ -417,7 +417,7 @@
   public void testNoTermVectorAfterTermVectorMerge() throws IOException {
     Directory dir = newDirectory();
     IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document document = new Document();
     document.add(newField("tvtest",
"a b c", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.YES)); Index: lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java (working copy) @@ -62,7 +62,7 @@ public void testAddDocument() throws Exception { Document testDoc = new Document(); DocHelper.setupDoc(testDoc); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.addDocument(testDoc); writer.commit(); SegmentInfo info = writer.newestSegment(); @@ -211,7 +211,7 @@ public void testPreAnalyzedField() throws IOException { IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer())); + TEST_VERSION_CURRENT, new MockAnalyzer(random))); Document doc = new Document(); doc.add(new Field("preanalyzed", new TokenStream() { @@ -271,7 +271,7 @@ doc.add(newField("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer())); + TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.addDocument(doc); writer.close(); @@ -306,7 +306,7 @@ doc.add(newField("f2", "v2", Store.YES, Index.NO)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer())); + TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.addDocument(doc); writer.optimize(); // be sure to have a single segment writer.close(); Index: lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (working copy) @@ -48,7 +48,7 @@ IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(10). 
            setMergePolicy(mergePolicy)
     );
@@ -152,7 +152,7 @@
     // Open & close a writer: it should delete the above 4
     // files and nothing more:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.close();
     String[] files2 = dir.listAll();
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java	(working copy)
@@ -75,10 +75,10 @@
   public void testIndexWriterLockRelease() throws IOException {
     Directory dir = newFSDirectory(this.__test_dir);
     try {
-      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     } catch (FileNotFoundException e) {
       try {
-        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
       } catch (FileNotFoundException e1) {
       }
     } finally {
Index: lucene/src/test/org/apache/lucene/index/TestMultiFields.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestMultiFields.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestMultiFields.java	(working copy)
@@ -31,7 +31,7 @@
     for (int iter = 0; iter < num; iter++) {
       Directory dir = newDirectory();
-      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
      _TestUtil.keepFullyDeletedSegments(w);
      Map> docs = new HashMap>();
@@ -134,7 +134,7 @@
   public void testSeparateEnums() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d = new Document();
     d.add(newField("f", "j", Field.Store.NO, Field.Index.NOT_ANALYZED));
     w.addDocument(d);
Index: lucene/src/test/org/apache/lucene/index/TestNewestSegment.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestNewestSegment.java	(revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestNewestSegment.java	(working copy)
@@ -24,7 +24,7 @@
 public class TestNewestSegment extends LuceneTestCase {
   public void testNewestSegment() throws Exception {
     Directory directory = newDirectory();
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     assertNull(writer.newestSegment());
     writer.close();
     directory.close();
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java
=================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterConfig.java (working copy) @@ -49,7 +49,7 @@ @Test public void testDefaults() throws Exception { - IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); assertEquals(MockAnalyzer.class, conf.getAnalyzer().getClass()); assertNull(conf.getIndexCommit()); assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass()); @@ -129,7 +129,7 @@ @Test public void testToString() throws Exception { - String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).toString(); + String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).toString(); for (Field f : IndexWriterConfig.class.getDeclaredFields()) { int modifiers = f.getModifiers(); if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) { @@ -146,7 +146,7 @@ @Test public void testClone() throws Exception { - IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); IndexWriterConfig clone = (IndexWriterConfig) conf.clone(); // Clone is shallow since not all parameters are cloneable. @@ -158,7 +158,7 @@ @Test public void testInvalidValues() throws Exception { - IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); // Test IndexDeletionPolicy assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass()); Index: lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (working copy) @@ -75,7 +75,7 @@ public void setUp() throws Exception { super.setUp(); similarityProviderOne = new SimilarityProviderOne(); - anlzr = new MockAnalyzer(); + anlzr = new MockAnalyzer(random); } /** Index: lucene/src/test/org/apache/lucene/index/TestMaxTermFrequency.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestMaxTermFrequency.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestMaxTermFrequency.java (working copy) @@ -47,7 +47,7 @@ super.setUp(); dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy()); + new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy()); config.setSimilarityProvider(new DefaultSimilarityProvider() { @Override public Similarity get(String field) { Index: lucene/src/test/org/apache/lucene/index/TestCodecs.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestCodecs.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestCodecs.java (working copy) @@ -321,7 +321,7 @@ public void testSepPositionAfterMerge() throws IOException { final Directory dir = newDirectory(); 
final IndexWriterConfig config = newIndexWriterConfig(Version.LUCENE_31, - new MockAnalyzer()); + new MockAnalyzer(random)); config.setCodecProvider(new MockSepCodecs()); final IndexWriter writer = new IndexWriter(dir, config); Index: lucene/src/test/org/apache/lucene/index/TestCrash.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestCrash.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestCrash.java (working copy) @@ -36,7 +36,7 @@ private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException { dir.setLockFactory(NoLockFactory.getNoLockFactory()); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()) + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler())); ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions(); if (initialCommit) { Index: lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java (working copy) @@ -32,7 +32,7 @@ public class TestThreadedOptimize extends LuceneTestCase { - private static final Analyzer ANALYZER = new MockAnalyzer(MockTokenizer.SIMPLE, true); + private static final Analyzer ANALYZER = new MockAnalyzer(random, MockTokenizer.SIMPLE, true); private final static int NUM_THREADS = 3; //private final static int NUM_THREADS = 5; Index: lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (working copy) @@ -72,7 +72,7 @@ FailOnlyOnFlush failure = new FailOnlyOnFlush(); directory.failOn(failure); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2)); writer.setInfoStream(VERBOSE ? System.out : null); Document doc = new Document(); Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED); @@ -130,7 +130,7 @@ // start: mp.setMinMergeDocs(1000); IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMergePolicy(mp)); writer.setInfoStream(VERBOSE ? System.out : null); @@ -169,7 +169,7 @@ public void testNoExtraFiles() throws IOException { MockDirectoryWrapper directory = newDirectory(); IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(2)); writer.setInfoStream(VERBOSE ? 
System.out : null); @@ -189,7 +189,7 @@ // Reopen writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2)); writer.setInfoStream(VERBOSE ? System.out : null); } @@ -207,7 +207,7 @@ IndexWriter writer = new IndexWriter( directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(2). setMergePolicy(newLogMergePolicy(100)) ); @@ -240,7 +240,7 @@ // Reopen writer = new IndexWriter( directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMergePolicy(newLogMergePolicy(100)) ); Index: lucene/src/test/org/apache/lucene/index/TestOmitTf.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestOmitTf.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestOmitTf.java (working copy) @@ -65,7 +65,7 @@ // omitTermFreqAndPositions bit in the FieldInfo public void testOmitTermFreqAndPositions() throws Exception { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)); Document d = new Document(); @@ -112,7 +112,7 @@ // omitTermFreqAndPositions for the same field works public void testMixedMerge() throws Exception { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter( ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). @@ -168,7 +168,7 @@ // field, public void testMixedRAM() throws Exception { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter( ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). @@ -219,7 +219,7 @@ // Verifies no *.prx exists when all fields omit term freq: public void testNoPrxFile() throws Throwable { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); @@ -251,7 +251,7 @@ // Test scores with one field with Term Freqs and one without, otherwise with equal content public void testBasic() throws Exception { Directory dir = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). 
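Note: every hunk up to this point makes the same mechanical substitution, replacing MockAnalyzer's no-argument constructor with one that takes the test's Random (the bare "random" field these tests reference is supplied by LuceneTestCase). The apparent intent is that whatever the analyzer randomizes is drawn from the per-test seed, so a failing run can be replayed deterministically. A minimal sketch of the before/after pattern, for illustration only and not itself part of the patch:

    // before: the analyzer's behavior was fixed at construction time
    //   IndexWriter w = new IndexWriter(dir,
    //       newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));

    // after: the analyzer is seeded from the test's Random; anything it
    // randomizes (the TestPayloads hunks near the end of this patch suggest
    // payload injection is one such thing) replays under the same seed
    IndexWriter w = new IndexWriter(dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
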
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (working copy) @@ -34,7 +34,7 @@ Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy())); for (int i = 0; i < 100; i++) { @@ -51,7 +51,7 @@ Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy())); boolean noOverMerge = false; @@ -76,7 +76,7 @@ mp.setMinMergeDocs(100); mp.setMergeFactor(10); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(10).setMergePolicy(mp)); for (int i = 0; i < 100; i++) { @@ -86,7 +86,7 @@ mp = new LogDocMergePolicy(); mp.setMergeFactor(10); writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer()).setOpenMode( + new MockAnalyzer(random)).setOpenMode( OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(mp)); mp.setMinMergeDocs(100); checkInvariants(writer); @@ -102,7 +102,7 @@ IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(10). 
setMergePolicy(newLogMergePolicy()) ); @@ -130,7 +130,7 @@ Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy()) .setMergeScheduler(new SerialMergeScheduler())); @@ -144,7 +144,7 @@ writer.close(); writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer()).setOpenMode( + new MockAnalyzer(random)).setOpenMode( OpenMode.APPEND).setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy()) .setMergeScheduler(new SerialMergeScheduler())); } @@ -153,7 +153,7 @@ LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.setMergeFactor(10); writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer()).setOpenMode( + new MockAnalyzer(random)).setOpenMode( OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new SerialMergeScheduler())); // merge policy only fixes segments on levels where merges @@ -182,7 +182,7 @@ LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.setMergeFactor(100); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(10).setMergePolicy(ldmp)); for (int i = 0; i < 250; i++) { @@ -198,7 +198,7 @@ ldmp = new LogDocMergePolicy(); ldmp.setMergeFactor(5); writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer()).setOpenMode( + new MockAnalyzer(random)).setOpenMode( OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler())); // merge factor is changed, so check invariants after all adds Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java (working copy) @@ -112,7 +112,7 @@ MockDirectoryWrapper dir = newDirectory(); IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(2). setMergeScheduler(new ConcurrentMergeScheduler()). setMergePolicy(newLogMergePolicy(4)) @@ -157,7 +157,7 @@ IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(10). setMergeScheduler(new ConcurrentMergeScheduler()). setMergePolicy(newLogMergePolicy(4)) @@ -222,7 +222,7 @@ IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(2). setMergeScheduler(new ConcurrentMergeScheduler()). 
setMergePolicy(newLogMergePolicy(4)) @@ -277,7 +277,7 @@ public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException { MockDirectoryWrapper dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()) + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler())); final Document doc = new Document(); doc.add(newField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); Index: lucene/src/test/org/apache/lucene/index/TestNRTThreads.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestNRTThreads.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestNRTThreads.java (working copy) @@ -70,7 +70,7 @@ final LineFileDocs docs = new LineFileDocs(random); final File tempDir = _TestUtil.getTempDir("nrtopenfiles"); final MockDirectoryWrapper dir = new MockDirectoryWrapper(random, FSDirectory.open(tempDir)); - final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); conf.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() { @Override public void warm(IndexReader reader) throws IOException { Index: lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestAtomicUpdate.java (working copy) @@ -127,7 +127,7 @@ TimedThread[] threads = new TimedThread[4]; IndexWriterConfig conf = new IndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMaxBufferedDocs(7); ((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3); IndexWriter writer = new MockIndexWriter(directory, conf); Index: lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java (working copy) @@ -70,7 +70,7 @@ public void testAllCommitsRemain() throws Exception { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); for (int i = 0; i < 10; i++) { Document doc = new Document(); Index: lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java (working copy) @@ -60,7 +60,7 @@ public void testSimple() throws Exception { Directory dir = newDirectory(); - final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, 
new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); Field field = newField("field", "", Field.Index.ANALYZED); doc.add(field); @@ -228,7 +228,7 @@ final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER; - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); // Sometimes swap in codec that impls ord(): if (random.nextInt(10) == 7) { @@ -331,7 +331,7 @@ final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER; - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); // Sometimes swap in codec that impls ord(): if (random.nextInt(10) == 7) { Index: lucene/src/test/org/apache/lucene/index/TestOmitNorms.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestOmitNorms.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestOmitNorms.java (working copy) @@ -32,7 +32,7 @@ // omitNorms bit in the FieldInfo public void testOmitNorms() throws Exception { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); Document d = new Document(); @@ -79,7 +79,7 @@ // omitNorms for the same field works public void testMixedMerge() throws Exception { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter( ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). @@ -137,7 +137,7 @@ // field, public void testMixedRAM() throws Exception { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter( ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). @@ -191,7 +191,7 @@ // Verifies no *.nrm exists when all fields omit norms: public void testNoNrmFile() throws Throwable { Directory ram = newDirectory(); - Analyzer analyzer = new MockAnalyzer(); + Analyzer analyzer = new MockAnalyzer(random); IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy())); writer.setInfoStream(VERBOSE ? 
System.out : null); @@ -264,7 +264,7 @@ */ static byte[] getNorms(String field, Field f1, Field f2) throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()); + IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()); RandomIndexWriter riw = new RandomIndexWriter(random, dir, iwc); // add f1 Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (working copy) @@ -65,7 +65,7 @@ public void testAddCloseOpen() throws IOException { Directory dir1 = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); IndexWriter writer = new IndexWriter(dir1, iwc); for (int i = 0; i < 97 ; i++) { @@ -99,7 +99,7 @@ assertTrue(reader.isCurrent()); writer.close(); assertTrue(reader.isCurrent()); // all changes are visible to the reader - iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); writer = new IndexWriter(dir1, iwc); assertTrue(reader.isCurrent()); writer.addDocument(createDocument(1, "x", 1+random.nextInt(5))); @@ -114,7 +114,7 @@ boolean optimize = true; Directory dir1 = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); if (iwc.getMaxBufferedDocs() < 20) { iwc.setMaxBufferedDocs(20); } @@ -165,7 +165,7 @@ assertEquals(0, count(new Term("id", id10), r3)); assertEquals(1, count(new Term("id", Integer.toString(8000)), r3)); - writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); Document doc = new Document(); doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED)); writer.addDocument(doc); @@ -185,7 +185,7 @@ public void testIsCurrent() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); IndexWriter writer = new IndexWriter(dir, iwc); Document doc = new Document(); @@ -193,7 +193,7 @@ writer.addDocument(doc); writer.close(); - iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); writer = new IndexWriter(dir, iwc); doc = new Document(); doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED)); @@ -230,7 +230,7 @@ boolean optimize = false; Directory dir1 = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()); + IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); if (iwc.getMaxBufferedDocs() < 20) { iwc.setMaxBufferedDocs(20); } @@ -249,7 +249,7 @@ // create a 2nd index Directory dir2 = newDirectory(); - IndexWriter writer2 = new 
IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer2.setInfoStream(infoStream); createIndexNoClose(!optimize, "index2", writer2); writer2.close(); @@ -287,12 +287,12 @@ boolean optimize = false; Directory dir1 = newDirectory(); - IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.setInfoStream(infoStream); // create a 2nd index Directory dir2 = newDirectory(); - IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer2.setInfoStream(infoStream); createIndexNoClose(!optimize, "index2", writer2); writer2.close(); @@ -321,7 +321,7 @@ boolean optimize = true; Directory dir1 = newDirectory(); - IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2)); + IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderTermsIndexDivisor(2)); writer.setInfoStream(infoStream); // create the index createIndexNoClose(!optimize, "index1", writer); @@ -359,7 +359,7 @@ writer.close(); // reopen the writer to verify the delete made it to the directory - writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.setInfoStream(infoStream); IndexReader w2r1 = writer.getReader(); assertEquals(0, count(new Term("id", id10), w2r1)); @@ -373,7 +373,7 @@ int numDirs = 3; Directory mainDir = newDirectory(); - IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); _TestUtil.reduceOpenFiles(mainWriter); mainWriter.setInfoStream(infoStream); @@ -418,7 +418,7 @@ this.numDirs = numDirs; this.mainWriter = mainWriter; addDir = newDirectory(); - IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2)); for (int i = 0; i < NUM_INIT_DOCS; i++) { Document doc = createDocument(i, "addindex", 4); writer.addDocument(doc); @@ -527,7 +527,7 @@ */ public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception { Directory dir1 = newDirectory(); - IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.setInfoStream(infoStream); IndexReader r1 = writer.getReader(); assertEquals(0, r1.maxDoc()); @@ -564,7 +564,7 @@ writer.close(); // test whether the changes made it to the directory - writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = 
new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); IndexReader w2r1 = writer.getReader(); // insure the deletes were actually flushed to the directory assertEquals(200, w2r1.maxDoc()); @@ -604,7 +604,7 @@ public static void createIndex(Random random, Directory dir1, String indexName, boolean multiSegment) throws IOException { IndexWriter w = new IndexWriter(dir1, LuceneTestCase.newIndexWriterConfig(random, - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMergePolicy(new LogDocMergePolicy())); for (int i = 0; i < 100; i++) { w.addDocument(createDocument(i, indexName, 4)); @@ -642,7 +642,7 @@ MyWarmer warmer = new MyWarmer(); IndexWriter writer = new IndexWriter( dir1, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(2). setMergedSegmentWarmer(warmer). setMergeScheduler(new ConcurrentMergeScheduler()). @@ -678,7 +678,7 @@ public void testAfterCommit() throws Exception { Directory dir1 = newDirectory(); - IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler())); + IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler())); writer.commit(); writer.setInfoStream(infoStream); @@ -711,7 +711,7 @@ // Make sure reader remains usable even if IndexWriter closes public void testAfterClose() throws Exception { Directory dir1 = newDirectory(); - IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); writer.setInfoStream(infoStream); // create the index @@ -743,7 +743,7 @@ MockDirectoryWrapper dir1 = newDirectory(); final IndexWriter writer = new IndexWriter( dir1, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMergePolicy(newLogMergePolicy(2)) ); writer.setInfoStream(infoStream); @@ -830,7 +830,7 @@ Directory dir1 = newDirectory(); final IndexWriter writer = new IndexWriter( dir1, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). 
setMergePolicy(newLogMergePolicy(2)) ); writer.setInfoStream(infoStream); @@ -915,7 +915,7 @@ public void testExpungeDeletes() throws Throwable { Directory dir = newDirectory(); - final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED)); Field id = newField("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED); @@ -939,7 +939,7 @@ public void testDeletesNumDocs() throws Throwable { Directory dir = newDirectory(); - final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); Document doc = new Document(); doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED)); Field id = newField("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED); @@ -969,7 +969,7 @@ public void testEmptyIndex() throws Exception { // Ensures that getReader works on an empty index, which hasn't been committed yet. Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))); IndexReader r = w.getReader(); assertEquals(0, r.numDocs()); r.close(); @@ -982,7 +982,7 @@ final AtomicBoolean didWarm = new AtomicBoolean(); IndexWriter w = new IndexWriter( dir, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(2). setReaderPooling(true). setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() { @@ -1017,7 +1017,7 @@ illegalCodecs.add("SimpleText"); IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer()).setReaderTermsIndexDivisor(-1); + new MockAnalyzer(random)).setReaderTermsIndexDivisor(-1); // Don't proceed if picked Codec is in the list of illegal ones. if (illegalCodecs.contains(conf.getCodecProvider().getFieldCodec("f"))) return; Index: lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java (working copy) @@ -44,8 +44,8 @@ doc2 = new Document(); DocHelper.setupDoc(doc1); DocHelper.setupDoc(doc2); - DocHelper.writeDoc(dir, doc1); - DocHelper.writeDoc(dir, doc2); + DocHelper.writeDoc(random, dir, doc1); + DocHelper.writeDoc(random, dir, doc2); sis = new SegmentInfos(); sis.read(dir); } @@ -199,7 +199,7 @@ private void addDoc(Random random, Directory ramDir1, String s, boolean create) throws IOException { IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer()).setOpenMode( + new MockAnalyzer(random)).setOpenMode( create ? 
OpenMode.CREATE : OpenMode.APPEND)); Document doc = new Document(); doc.add(newField("body", s, Field.Store.YES, Field.Index.ANALYZED)); Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (working copy) @@ -44,7 +44,7 @@ Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); for (int i = 0; i < keywords.length; i++) { Document doc = new Document(); @@ -79,7 +79,7 @@ Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); modifier.setInfoStream(VERBOSE ? System.out : null); int id = 0; @@ -113,7 +113,7 @@ public void testMaxBufferedDeletes() throws IOException { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); writer.setInfoStream(VERBOSE ? System.out : null); writer.addDocument(new Document()); @@ -133,7 +133,7 @@ } Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4) .setMaxBufferedDeleteTerms(4)); modifier.setInfoStream(VERBOSE ? 
System.out : null); int id = 0; @@ -172,7 +172,7 @@ public void testBothDeletes() throws IOException { Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100) .setMaxBufferedDeleteTerms(100)); int id = 0; @@ -206,7 +206,7 @@ public void testBatchDeletes() throws IOException { Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -249,7 +249,7 @@ public void testDeleteAll() throws IOException { Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -295,7 +295,7 @@ public void testDeleteAllRollback() throws IOException { Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -332,7 +332,7 @@ public void testDeleteAllNRT() throws IOException { Directory dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -423,7 +423,7 @@ MockDirectoryWrapper startDir = newDirectory(); // TODO: find the resource leak that only occurs sometimes here. 
startDir.setNoDeleteOpenFile(false); - IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))); for (int i = 0; i < 157; i++) { Document d = new Document(); d.add(newField("id", Integer.toString(i), Field.Store.YES, @@ -450,7 +450,7 @@ dir.setPreventDoubleWrite(false); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)) .setMaxBufferedDocs(1000) .setMaxBufferedDeleteTerms(1000) .setMergeScheduler(new ConcurrentMergeScheduler())); @@ -682,7 +682,7 @@ MockDirectoryWrapper dir = newDirectory(); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy())); + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy())); modifier.setInfoStream(VERBOSE ? System.out : null); LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy(); @@ -809,7 +809,7 @@ String[] text = { "Amsterdam", "Venice" }; MockDirectoryWrapper dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))); modifier.commit(); dir.failOn(failure.reset()); @@ -837,7 +837,7 @@ public void testDeleteNullQuery() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))); for (int i = 0; i < 5; i++) { addDoc(modifier, i, 2*i); Index: lucene/src/test/org/apache/lucene/index/TestFieldsReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (working copy) @@ -51,7 +51,7 @@ DocHelper.setupDoc(testDoc); _TestUtil.add(testDoc, fieldInfos); dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()); + IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()); ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false); IndexWriter writer = new IndexWriter(dir, conf); writer.addDocument(testDoc); @@ -291,7 +291,7 @@ Directory tmpDir = newFSDirectory(file); assertTrue(tmpDir != null); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy()); + IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy()); ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false); IndexWriter writer = new IndexWriter(tmpDir, conf); writer.addDocument(testDoc); @@ -478,7 +478,7 @@ try { Directory dir = new FaultyFSDirectory(indexDir); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)); + TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE)); for(int i=0;i<2;i++) writer.addDocument(testDoc); writer.optimize(); Index: lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestRollingUpdates.java (working copy) @@ -34,7 +34,7 @@ final LineFileDocs docs = new LineFileDocs(random); - final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); final int SIZE = 200 * RANDOM_MULTIPLIER; int id = 0; IndexReader r = null; Index: lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (working copy) @@ -32,7 +32,7 @@ Directory mainDir = newDirectory(); IndexWriter writer = new IndexWriter( mainDir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setMaxBufferedDocs(10). 
setMergePolicy(newLogMergePolicy(false,2)) ); Index: lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java (working copy) @@ -64,7 +64,7 @@ throw new RuntimeException("Couldn't find commit point "+id); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy( + TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy( new RollbackDeletionPolicy(id)).setIndexCommit(last)); Map data = new HashMap(); data.put("index", "Rolled back to 1-"+id); @@ -127,7 +127,7 @@ dir = newDirectory(); //Build index, of records 1 to 100, committing after each batch of 10 IndexDeletionPolicy sdp=new KeepAllDeletionPolicy(); - IndexWriter w=new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(sdp)); + IndexWriter w=new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(sdp)); for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) { Document doc=new Document(); doc.add(newField(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED)); @@ -201,7 +201,7 @@ for(int i=0;i<2;i++) { // Unless you specify a prior commit point, rollback // should not work: - new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()) + new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setIndexDeletionPolicy(new DeleteLastCommitPolicy())).close(); IndexReader r = IndexReader.open(dir, true); assertEquals(100, r.numDocs()); Index: lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (working copy) @@ -126,7 +126,7 @@ */ public void testFilterIndexReader() throws Exception { Directory directory = newDirectory(); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); Document d1 = new Document(); d1.add(newField("default","one two", Field.Store.YES, Field.Index.ANALYZED)); @@ -143,7 +143,7 @@ writer.close(); Directory target = newDirectory(); - writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); IndexReader reader = new TestReader(IndexReader.open(directory, true)); writer.addIndexes(reader); writer.close(); Index: lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (working copy) @@ -173,7 +173,7 @@ private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException { IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new 
MockAnalyzer()).setOpenMode( + TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode( OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newLogMergePolicy())); iwriter.commit(); IndexReader reader = IndexReader.open(dir, false); @@ -700,7 +700,7 @@ final Directory dir = newDirectory(); final int n = 30 * RANDOM_MULTIPLIER; IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer())); + TEST_VERSION_CURRENT, new MockAnalyzer(random))); for (int i = 0; i < n; i++) { writer.addDocument(createDocument(i, 3)); } @@ -721,7 +721,7 @@ modifier.close(); } else { IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer())); + TEST_VERSION_CURRENT, new MockAnalyzer(random))); modifier.addDocument(createDocument(n + i, 6)); modifier.close(); } @@ -937,7 +937,7 @@ public static void createIndex(Random random, Directory dir, boolean multiSegment) throws IOException { IndexWriter.unlock(dir); IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random, - TEST_VERSION_CURRENT, new MockAnalyzer()) + TEST_VERSION_CURRENT, new MockAnalyzer(random)) .setMergePolicy(new LogDocMergePolicy())); for (int i = 0; i < 100; i++) { @@ -984,7 +984,7 @@ if (VERBOSE) { System.out.println("TEST: modify index"); } - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); w.setInfoStream(VERBOSE ? System.out : null); w.deleteDocuments(new Term("field2", "a11")); w.deleteDocuments(new Term("field2", "b30")); @@ -1001,13 +1001,13 @@ break; } case 2: { - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); w.optimize(); w.close(); break; } case 3: { - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); w.addDocument(createDocument(101, 4)); w.optimize(); w.addDocument(createDocument(102, 4)); @@ -1024,7 +1024,7 @@ break; } case 5: { - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); w.addDocument(createDocument(101, 4)); w.close(); break; @@ -1188,7 +1188,7 @@ Directory dir = newDirectory(); IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setIndexDeletionPolicy(new KeepAllCommits()). setMaxBufferedDocs(-1). 
setMergePolicy(newLogMergePolicy(10)) Index: lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (working copy) @@ -47,9 +47,9 @@ merge1Dir = newDirectory(); merge2Dir = newDirectory(); DocHelper.setupDoc(doc1); - SegmentInfo info1 = DocHelper.writeDoc(merge1Dir, doc1); + SegmentInfo info1 = DocHelper.writeDoc(random, merge1Dir, doc1); DocHelper.setupDoc(doc2); - SegmentInfo info2 = DocHelper.writeDoc(merge2Dir, doc2); + SegmentInfo info2 = DocHelper.writeDoc(random, merge2Dir, doc2); reader1 = SegmentReader.get(true, info1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR); reader2 = SegmentReader.get(true, info2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR); } Index: lucene/src/test/org/apache/lucene/index/TestAddIndexes.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (revision 1091052) +++ lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (working copy) @@ -53,7 +53,7 @@ IndexWriter writer = null; writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer()) + new MockAnalyzer(random)) .setOpenMode(OpenMode.CREATE)); writer.setInfoStream(VERBOSE ? System.out : null); // add 100 documents @@ -64,7 +64,7 @@ writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMergePolicy(newLogMergePolicy(false)) ); @@ -73,14 +73,14 @@ assertEquals(40, writer.maxDoc()); writer.close(); - writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)); + writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE)); // add 40 documents in compound files addDocs2(writer, 50); assertEquals(50, writer.maxDoc()); writer.close(); // test doc count before segments are merged - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); assertEquals(100, writer.maxDoc()); writer.addIndexes(aux, aux2); assertEquals(190, writer.maxDoc()); @@ -95,14 +95,14 @@ // now add another set in. Directory aux3 = newDirectory(); - writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); // add 40 documents addDocs(writer, 40); assertEquals(40, writer.maxDoc()); writer.close(); // test doc count before segments are merged/index is optimized - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); assertEquals(190, writer.maxDoc()); writer.addIndexes(aux3); assertEquals(230, writer.maxDoc()); @@ -116,7 +116,7 @@ verifyTermDocs(dir, new Term("content", "bbb"), 50); // now optimize it. 
- writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); writer.optimize(); writer.close(); @@ -129,11 +129,11 @@ // now add a single document Directory aux4 = newDirectory(); - writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); addDocs2(writer, 1); writer.close(); - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); assertEquals(230, writer.maxDoc()); writer.addIndexes(aux4); assertEquals(231, writer.maxDoc()); @@ -156,7 +156,7 @@ Directory aux = newDirectory(); setUpDirs(dir, aux); - IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); writer.setInfoStream(VERBOSE ? System.out : null); writer.addIndexes(aux); @@ -194,7 +194,7 @@ Directory aux = newDirectory(); setUpDirs(dir, aux); - IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: @@ -232,7 +232,7 @@ Directory aux = newDirectory(); setUpDirs(dir, aux); - IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: @@ -273,7 +273,7 @@ IndexWriter writer = null; - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); // add 100 documents addDocs(writer, 100); assertEquals(100, writer.maxDoc()); @@ -281,7 +281,7 @@ writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(1000). setMergePolicy(newLogMergePolicy(false)) @@ -291,7 +291,7 @@ writer.close(); writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(1000). 
setMergePolicy(newLogMergePolicy(false)) @@ -299,7 +299,7 @@ addDocs(writer, 100); writer.close(); - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); try { // cannot add self writer.addIndexes(aux, dir); @@ -329,7 +329,7 @@ IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(10). setMergePolicy(newLogMergePolicy(4)) @@ -358,7 +358,7 @@ IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(9). setMergePolicy(newLogMergePolicy(4)) @@ -387,7 +387,7 @@ IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(10). setMergePolicy(newLogMergePolicy(4)) @@ -422,7 +422,7 @@ IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(4). setMergePolicy(newLogMergePolicy(4)) @@ -448,7 +448,7 @@ IndexWriter writer = newWriter( aux2, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(100). setMergePolicy(newLogMergePolicy(10)) @@ -475,7 +475,7 @@ writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(6). setMergePolicy(newLogMergePolicy(4)) @@ -536,7 +536,7 @@ private void setUpDirs(Directory dir, Directory aux) throws IOException { IndexWriter writer = null; - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000)); + writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000)); // add 1000 documents in 1 segment addDocs(writer, 1000); assertEquals(1000, writer.maxDoc()); @@ -545,7 +545,7 @@ writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(1000). setMergePolicy(newLogMergePolicy(false, 10)) @@ -556,7 +556,7 @@ writer.close(); writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(1000). 
           setMergePolicy(newLogMergePolicy(false, 10))
@@ -575,7 +575,7 @@
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(100);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(5).setMergePolicy(lmp));
 
     Document doc = new Document();
@@ -603,7 +603,7 @@
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(4);
     writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
     writer.addIndexes(dir);
     writer.close();
@@ -636,14 +636,14 @@
       NUM_COPY = numCopy;
       dir = new MockDirectoryWrapper(random, new RAMDirectory());
       IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
           .setMaxBufferedDocs(2));
       for (int i = 0; i < NUM_INIT_DOCS; i++)
         addDoc(writer);
       writer.close();
 
       dir2 = newDirectory();
-      writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer2.setInfoStream(VERBOSE ? System.out : null);
       writer2.commit();
@@ -944,7 +944,7 @@
     Directory[] dirs = new Directory[2];
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = newDirectory();
-      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
       IndexWriter writer = new IndexWriter(dirs[i], conf);
       Document doc = new Document();
       doc.add(new Field("id", "myid", Store.NO, Index.NOT_ANALYZED_NO_NORMS));
@@ -952,7 +952,7 @@
       writer.close();
     }
 
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     IndexWriter writer = new IndexWriter(dirs[0], conf);
 
     // Now delete the document
@@ -992,7 +992,7 @@
     IndexWriter writer = null;
 
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setCodecProvider(
+        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodecProvider(
         provider));
     // add 100 documents
     addDocs3(writer, 100);
@@ -1003,7 +1003,7 @@
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setCodecProvider(provider).
            setMaxBufferedDocs(10).
@@ -1017,7 +1017,7 @@
     writer = newWriter(
         aux2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setCodecProvider(provider)
     );
@@ -1030,7 +1030,7 @@
     // test doc count before segments are merged
     writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
           setCodecProvider(provider)
     );
@@ -1063,7 +1063,7 @@
     Directory[] dirs = new Directory[2];
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = new RAMDirectory();
-      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document d = new Document();
       d.add(new Field("c", "v", Store.YES, Index.ANALYZED, TermVector.YES));
       w.addDocument(d);
@@ -1073,7 +1073,7 @@
     IndexReader[] readers = new IndexReader[] { IndexReader.open(dirs[0]), IndexReader.open(dirs[1]) };
 
     Directory dir = new RAMDirectory();
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
     lmp.setUseCompoundFile(true);
     lmp.setNoCFSRatio(1.0); // Force creation of CFS
Index: lucene/src/test/org/apache/lucene/index/TestStressIndexing.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestStressIndexing.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestStressIndexing.java (working copy)
@@ -114,7 +114,7 @@
   */
   public void runStressTest(Directory directory, MergeScheduler mergeScheduler) throws Exception {
     IndexWriter modifier = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergeScheduler(
             mergeScheduler));
     modifier.commit();
Index: lucene/src/test/org/apache/lucene/index/TestPayloads.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestPayloads.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestPayloads.java (working copy)
@@ -479,7 +479,7 @@
         Directory dir = newDirectory();
         final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer()));
+            TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         final String field = "test";
 
         Thread[] ingesters = new Thread[numThreads];
@@ -600,16 +600,16 @@
   public void testAcrossFields() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-                                                     new MockAnalyzer(MockTokenizer.WHITESPACE, true, true));
+                                                     new MockAnalyzer(random, MockTokenizer.WHITESPACE, true));
     Document doc = new Document();
-    doc.add(new Field("haspayload", "here we go", Field.Store.YES, Field.Index.ANALYZED));
+    doc.add(new Field("hasMaybepayload", "here we go", Field.Store.YES, Field.Index.ANALYZED));
     writer.addDocument(doc);
     writer.close();
 
     writer = new RandomIndexWriter(random, dir,
-                                   new MockAnalyzer(MockTokenizer.WHITESPACE, true, false));
+                                   new MockAnalyzer(random, MockTokenizer.WHITESPACE, true));
     doc = new Document();
-    doc.add(new Field("nopayload", "here we go", Field.Store.YES, Field.Index.ANALYZED));
+    doc.add(new Field("hasMaybepayload2", "here we go", Field.Store.YES, Field.Index.ANALYZED));
     writer.addDocument(doc);
     writer.addDocument(doc);
     writer.optimize();
Index: lucene/src/test/org/apache/lucene/index/TestTieredMergePolicy.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestTieredMergePolicy.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestTieredMergePolicy.java (working copy)
@@ -28,7 +28,7 @@
   public void testExpungeDeletes() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     TieredMergePolicy tmp = newTieredMergePolicy();
     conf.setMergePolicy(tmp);
     conf.setMaxBufferedDocs(4);
@@ -71,7 +71,7 @@
         System.out.println("TEST: iter=" + iter);
       }
       Directory dir = newDirectory();
-      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
       conf.setMergeScheduler(new SerialMergeScheduler());
       TieredMergePolicy tmp = newTieredMergePolicy();
       conf.setMergePolicy(tmp);
Index: lucene/src/test/org/apache/lucene/index/TestLazyBug.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestLazyBug.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestLazyBug.java (working copy)
@@ -63,7 +63,7 @@
     Directory dir = newDirectory();
     try {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
       LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
       lmp.setUseCompoundFile(false);
Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (working copy)
@@ -57,7 +57,7 @@
     IndexWriter writer = new IndexWriter(
         merged,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(2))
     );
     writer.setInfoStream(VERBOSE ? System.out : null);
@@ -98,7 +98,7 @@
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMaxBufferedDocs(2).
             setMergePolicy(newLogMergePolicy(2))
Index: lucene/src/test/org/apache/lucene/index/Test2BTerms.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/Test2BTerms.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/Test2BTerms.java (working copy)
@@ -138,7 +138,7 @@
     Directory dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
     IndexWriter w = new IndexWriter(
         dir,
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).
             setRAMBufferSizeMB(256.0).
             setMergeScheduler(new ConcurrentMergeScheduler()).
Index: lucene/src/test/org/apache/lucene/index/TestSegmentInfo.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestSegmentInfo.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestSegmentInfo.java (working copy)
@@ -29,7 +29,7 @@
   public void testSizeInBytesCache() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
     IndexWriter writer = new IndexWriter(dir, conf);
     writer.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
Index: lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java (working copy)
@@ -39,7 +39,7 @@
     //IndexWriter.debug2 = System.out;
     Directory dir = new MockDirectoryWrapper(new Random(random.nextLong()), new RAMDirectory());
     IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_CURRENT,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     iwc.setMergeScheduler(new SerialMergeScheduler());
     iwc.setMaxBufferedDocs(5000);
     iwc.setRAMBufferSizeMB(100);
Index: lucene/src/test/org/apache/lucene/index/TestParallelReader.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestParallelReader.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestParallelReader.java (working copy)
@@ -119,7 +119,7 @@
     // one document only:
     Directory dir2 = newDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d3 = new Document();
     d3.add(newField("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     w2.addDocument(d3);
@@ -175,7 +175,7 @@
     // add another document to ensure that the indexes are not optimized
     IndexWriter modifier = new IndexWriter(
         dir1,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(10))
     );
     Document d = new Document();
@@ -185,7 +185,7 @@
     modifier = new IndexWriter(
         dir2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(10))
     );
     d = new Document();
@@ -200,7 +200,7 @@
     assertFalse(pr.isOptimized());
     pr.close();
 
-    modifier = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    modifier.optimize();
    modifier.close();
@@ -212,7 +212,7 @@
     pr.close();
 
-    modifier = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     modifier.optimize();
     modifier.close();
@@ -244,7 +244,7 @@
   // Fields 1-4 indexed together:
   private IndexSearcher single(Random random) throws IOException {
     dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d1 = new Document();
     d1.add(newField("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(newField("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -274,7 +274,7 @@
   private Directory getDir1(Random random) throws IOException {
     Directory dir1 = newDirectory();
-    IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d1 = new Document();
     d1.add(newField("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(newField("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -289,7 +289,7 @@
   private Directory getDir2(Random random) throws IOException {
     Directory dir2 = newDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d3 = new Document();
     d3.add(newField("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d3.add(newField("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
Index: lucene/src/test/org/apache/lucene/index/codecs/preflex/TestSurrogates.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/codecs/preflex/TestSurrogates.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/codecs/preflex/TestSurrogates.java (working copy)
@@ -275,7 +275,7 @@
     RandomIndexWriter w = new RandomIndexWriter(random, dir,
                                                 newIndexWriterConfig( TEST_VERSION_CURRENT,
-                                                                      new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec(new PreFlexRWCodec())));
+                                                                      new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec(new PreFlexRWCodec())));
     final int numField = _TestUtil.nextInt(random, 2, 5);
Index: lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java
===================================================================
--- lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (revision 1091052)
+++ lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (working copy)
@@ -204,7 +204,7 @@
     Directory dir = newDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setIndexDeletionPolicy(policy);
     MergePolicy mp = conf.getMergePolicy();
     if (mp instanceof LogMergePolicy) {
@@ -221,7 +221,7 @@
     // past commits
     lastDeleteTime = System.currentTimeMillis();
     conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(
+        new MockAnalyzer(random)).setOpenMode(
         OpenMode.APPEND).setIndexDeletionPolicy(policy);
     mp = conf.getMergePolicy();
     if (mp instanceof LogMergePolicy) {
@@ -303,7 +303,7 @@
     policy.dir = dir;
 
     IndexWriterConfig conf = newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
         .setMergeScheduler(new SerialMergeScheduler());
     MergePolicy mp = conf.getMergePolicy();
@@ -324,7 +324,7 @@
     }
     if (!isOptimized) {
       conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setOpenMode(
+          new MockAnalyzer(random)).setOpenMode(
           OpenMode.APPEND).setIndexDeletionPolicy(policy);
       mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -373,7 +373,7 @@
       int preCount = dir.listAll().length;
       writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setOpenMode(
+          new MockAnalyzer(random)).setOpenMode(
           OpenMode.APPEND).setIndexDeletionPolicy(policy));
       writer.close();
       int postCount = dir.listAll().length;
@@ -397,7 +397,7 @@
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setIndexDeletionPolicy(policy).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(10))
@@ -419,7 +419,7 @@
     assertTrue(lastCommit != null);
 
     // Now add 1 doc and optimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.optimize();
@@ -428,7 +428,7 @@
     assertEquals(6, IndexReader.listCommits(dir).size());
 
     // Now open writer on the commit just before optimize:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
@@ -441,7 +441,7 @@
     assertEquals(11, r.numDocs());
     r.close();
 
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
@@ -458,7 +458,7 @@
     r.close();
 
     // Reoptimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
     writer.optimize();
     writer.close();
@@ -469,7 +469,7 @@
     // Now open writer on the commit just before optimize,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexCommit(lastCommit));
     assertEquals(10, writer.numDocs());
 
     // Reader still sees optimized index, because writer
@@ -505,7 +505,7 @@
     Directory dir = newDirectory();
 
     IndexWriterConfig conf = newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
         .setMaxBufferedDocs(10);
     MergePolicy mp = conf.getMergePolicy();
@@ -518,7 +518,7 @@
     }
     writer.close();
 
-    conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
     mp = conf.getMergePolicy();
     if (mp instanceof LogMergePolicy) {
@@ -558,7 +558,7 @@
     for(int j=0;j<N+1;j++) {
Index: lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java (revision 1091052)
+++ lucene/src/test-framework/org/apache/lucene/analysis/MockAnalyzer.java (working copy)
+  private final Random random;
+  private Map<String,Integer> previousMappings = new HashMap<String,Integer>();
 
   /**
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(runAutomaton, lowerCase, filter, enablePositionIncrements, true}).
-   */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase, CharacterRunAutomaton filter, boolean enablePositionIncrements) {
-    this(runAutomaton, lowerCase, filter, enablePositionIncrements, true);
-  }
-
-  /**
    * Creates a new MockAnalyzer.
    *
+   * @param random Random for payloads behavior
    * @param runAutomaton DFA describing how tokenization should happen (e.g. [a-zA-Z]+)
    * @param lowerCase true if the tokenizer should lowercase terms
    * @param filter DFA describing how terms should be filtered (set of stopwords, etc)
    * @param enablePositionIncrements true if position increments should reflect filtered terms.
    * @param payload if payloads should be added containing the positions (for testing)
    */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase, CharacterRunAutomaton filter, boolean enablePositionIncrements, boolean payload) {
+  public MockAnalyzer(Random random, CharacterRunAutomaton runAutomaton, boolean lowerCase, CharacterRunAutomaton filter, boolean enablePositionIncrements) {
+    this.random = random;
     this.runAutomaton = runAutomaton;
     this.lowerCase = lowerCase;
     this.filter = filter;
     this.enablePositionIncrements = enablePositionIncrements;
-    this.payload = payload;
   }
 
   /**
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, true}).
+   * Calls {@link #MockAnalyzer(Random, CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean)
+   * MockAnalyzer(random, runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false}).
    */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase) {
-    this(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, true);
+  public MockAnalyzer(Random random, CharacterRunAutomaton runAutomaton, boolean lowerCase) {
+    this(random, runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false);
   }
 
-  /**
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, payload}).
-   */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase, boolean payload) {
-    this(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, payload);
-  }
-
   /**
    * Create a Whitespace-lowercasing analyzer with no stopwords removal.
    * <p>
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(MockTokenizer.WHITESPACE, true, MockTokenFilter.EMPTY_STOPSET, false, true}).
+   * Calls {@link #MockAnalyzer(Random, CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean)
+   * MockAnalyzer(random, MockTokenizer.WHITESPACE, true, MockTokenFilter.EMPTY_STOPSET, false}).
    */
-  public MockAnalyzer() {
-    this(MockTokenizer.WHITESPACE, true);
+  public MockAnalyzer(Random random) {
+    this(random, MockTokenizer.WHITESPACE, true);
   }
 
   @Override
   public TokenStream tokenStream(String fieldName, Reader reader) {
     MockTokenizer tokenizer = new MockTokenizer(reader, runAutomaton, lowerCase);
     TokenFilter filt = new MockTokenFilter(tokenizer, filter, enablePositionIncrements);
-    if (payload){
-      filt = new SimplePayloadFilter(filt, fieldName);
-    }
+    filt = maybePayload(filt, fieldName);
     return filt;
   }
 
@@ -105,15 +89,19 @@
   @Override
   public TokenStream reusableTokenStream(String fieldName, Reader reader)
       throws IOException {
-    SavedStreams saved = (SavedStreams) getPreviousTokenStream();
+    Map<String,SavedStreams> map = (Map<String,SavedStreams>) getPreviousTokenStream();
+    if (map == null) {
+      map = new HashMap<String,SavedStreams>();
+      setPreviousTokenStream(map);
+    }
+
+    SavedStreams saved = map.get(fieldName);
     if (saved == null) {
       saved = new SavedStreams();
       saved.tokenizer = new MockTokenizer(reader, runAutomaton, lowerCase);
       saved.filter = new MockTokenFilter(saved.tokenizer, filter, enablePositionIncrements);
-      if (payload){
-        saved.filter = new SimplePayloadFilter(saved.filter, fieldName);
-      }
-      setPreviousTokenStream(saved);
+      saved.filter = maybePayload(saved.filter, fieldName);
+      map.put(fieldName, saved);
       return saved.filter;
     } else {
       saved.tokenizer.reset(reader);
@@ -122,6 +110,28 @@
     }
   }
 
+  private synchronized TokenFilter maybePayload(TokenFilter stream, String fieldName) {
+    Integer val = previousMappings.get(fieldName);
+    if (val == null) {
+      switch(random.nextInt(3)) {
+        case 0: val = -1; // no payloads
+                break;
+        case 1: val = Integer.MAX_VALUE; // variable length payload
+                break;
+        case 2: val = random.nextInt(12); // fixed length payload
+                break;
+      }
+      previousMappings.put(fieldName, val); // save it so we are consistent for this field
+    }
+
+    if (val == -1)
+      return stream;
+    else if (val == Integer.MAX_VALUE)
+      return new MockVariableLengthPayloadFilter(random, stream);
+    else
+      return new MockFixedLengthPayloadFilter(random, stream, val);
+  }
+
   public void setPositionIncrementGap(int positionIncrementGap){
     this.positionIncrementGap = positionIncrementGap;
   }
@@ -131,35 +141,3 @@
     return positionIncrementGap;
   }
 }
-
-final class SimplePayloadFilter extends TokenFilter {
-  String fieldName;
-  int pos;
-  final PayloadAttribute payloadAttr;
-  final CharTermAttribute termAttr;
-
-  public SimplePayloadFilter(TokenStream input, String fieldName) {
-    super(input);
-    this.fieldName = fieldName;
-    pos = 0;
-    payloadAttr = input.addAttribute(PayloadAttribute.class);
-    termAttr = input.addAttribute(CharTermAttribute.class);
-  }
-
-  @Override
-  public boolean incrementToken() throws IOException {
-    if (input.incrementToken()) {
-      payloadAttr.setPayload(new Payload(("pos: " + pos).getBytes()));
-      pos++;
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  @Override
-  public void reset() throws IOException {
-    super.reset();
-    pos = 0;
-  }
-}
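Reviewer sketch (not part of the patch): the visible effect of the new API is that MockAnalyzer now takes the test's Random, and maybePayload() above picks a payload mode per field exactly once, caching it in previousMappings so every TokenStream for that field behaves the same way for a given seed. Class name, seed, and the field/text below are arbitrary illustrations.

  import java.io.StringReader;
  import java.util.Random;

  import org.apache.lucene.analysis.Analyzer;
  import org.apache.lucene.analysis.MockAnalyzer;
  import org.apache.lucene.analysis.TokenStream;

  public class MockAnalyzerPayloadSketch {
    public static void main(String[] args) throws Exception {
      Random random = new Random(42);
      Analyzer analyzer = new MockAnalyzer(random); // whitespace tokenizer, lowercasing, no stopwords
      // First stream for "f1" rolls the dice: no payloads, fixed-length, or
      // variable-length payloads. Later streams for "f1" reuse the cached choice.
      TokenStream ts = analyzer.tokenStream("f1", new StringReader("some test text"));
      ts.reset();
      while (ts.incrementToken()) {
        // consume tokens; payloads, if enabled, ride on the PayloadAttribute
      }
    }
  }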
Index: lucene/src/test-framework/org/apache/lucene/analysis/MockVariableLengthPayloadFilter.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/analysis/MockVariableLengthPayloadFilter.java (revision 0)
+++ lucene/src/test-framework/org/apache/lucene/analysis/MockVariableLengthPayloadFilter.java (revision 0)
@@ -0,0 +1,51 @@
+package org.apache.lucene.analysis;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.index.Payload;
+
+final class MockVariableLengthPayloadFilter extends TokenFilter {
+  private static final int MAXLENGTH = 129;
+
+  private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
+  private final Random random;
+  private final byte[] bytes = new byte[MAXLENGTH];
+  private final Payload payload;
+
+  MockVariableLengthPayloadFilter(Random random, TokenStream in) {
+    super(in);
+    this.random = random;
+    this.payload = new Payload(bytes);
+  }
+
+  @Override
+  public boolean incrementToken() throws IOException {
+    if (input.incrementToken()) {
+      random.nextBytes(bytes);
+      payload.setData(bytes, 0, random.nextInt(MAXLENGTH));
+      payloadAtt.setPayload(payload);
+      return true;
+    } else {
+      return false;
+    }
  }
+}

Property changes on: lucene\src\test-framework\org\apache\lucene\analysis\MockVariableLengthPayloadFilter.java
___________________________________________________________________
Added: svn:eol-style
   + native
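A note on the design (not part of the patch): the filter reuses one Payload object and overwrites its backing byte[] on every token, so randomized payloads add almost no per-token allocation. The class is package-private and is normally reached only through MockAnalyzer.maybePayload(); the same-package sketch below, with an arbitrary tokenizer and text, just makes the behavior observable.

  package org.apache.lucene.analysis;

  import java.io.StringReader;
  import java.util.Random;

  import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;

  public class PayloadFilterSketch {
    public static void main(String[] args) throws Exception {
      Random random = new Random(0);
      MockTokenizer tokenizer = new MockTokenizer(new StringReader("a b c"), MockTokenizer.WHITESPACE, true);
      TokenStream ts = new MockVariableLengthPayloadFilter(random, tokenizer);
      PayloadAttribute payloadAtt = ts.getAttribute(PayloadAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        // each token carries a fresh random payload of length 0..MAXLENGTH-1
        System.out.println(payloadAtt.getPayload().length());
      }
    }
  }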
Index: lucene/src/test-framework/org/apache/lucene/analysis/MockFixedLengthPayloadFilter.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/analysis/MockFixedLengthPayloadFilter.java (revision 0)
+++ lucene/src/test-framework/org/apache/lucene/analysis/MockFixedLengthPayloadFilter.java (revision 0)
@@ -0,0 +1,49 @@
+package org.apache.lucene.analysis;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.index.Payload;
+
+final class MockFixedLengthPayloadFilter extends TokenFilter {
+  private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
+  private final Random random;
+  private final byte[] bytes;
+  private final Payload payload;
+
+  MockFixedLengthPayloadFilter(Random random, TokenStream in, int length) {
+    super(in);
+    this.random = random;
+    this.bytes = new byte[length];
+    this.payload = new Payload(bytes);
+  }
+
+  @Override
+  public boolean incrementToken() throws IOException {
+    if (input.incrementToken()) {
+      random.nextBytes(bytes);
+      payloadAtt.setPayload(payload);
+      return true;
+    } else {
+      return false;
+    }
+  }
+}

Property changes on: lucene\src\test-framework\org\apache\lucene\analysis\MockFixedLengthPayloadFilter.java
___________________________________________________________________
Added: svn:eol-style
   + native

Index: lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java (revision 1091052)
+++ lucene/src/test-framework/org/apache/lucene/search/QueryUtils.java (working copy)
@@ -166,7 +166,7 @@
       throws IOException {
     Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
     IndexWriter w = new IndexWriter(d, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < numDeletedDocs; i++) {
       w.addDocument(new Document());
     }
Index: lucene/src/test-framework/org/apache/lucene/index/DocHelper.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/DocHelper.java (revision 1091052)
+++ lucene/src/test-framework/org/apache/lucene/index/DocHelper.java (working copy)
@@ -21,6 +21,7 @@
 import java.io.UnsupportedEncodingException;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Random;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -30,6 +31,8 @@
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.SimilarityProvider;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+
 import static org.apache.lucene.util.LuceneTestCase.TEST_VERSION_CURRENT;
 
 class DocHelper {
@@ -218,9 +221,9 @@
    * @param doc
    * @throws IOException
    */
-  public static SegmentInfo writeDoc(Directory dir, Document doc) throws IOException
+  public static SegmentInfo writeDoc(Random random, Directory dir, Document doc) throws IOException
   {
-    return writeDoc(dir, new MockAnalyzer(MockTokenizer.WHITESPACE, false), null, doc);
+    return writeDoc(random, dir, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), null, doc);
   }
 
   /**
@@ -233,8 +236,8 @@
    * @param doc
    * @throws IOException
   */
-  public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, SimilarityProvider similarity, Document doc) throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+  public static SegmentInfo writeDoc(Random random, Directory dir, Analyzer analyzer, SimilarityProvider similarity, Document doc) throws IOException {
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( /* LuceneTestCase.newIndexWriterConfig(random, */
        TEST_VERSION_CURRENT, analyzer).setSimilarityProvider(similarity));
     //writer.setUseCompoundFile(false);
     writer.addDocument(doc);
Index: lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java
===================================================================
--- lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java (revision 1091052)
+++ lucene/src/test-framework/org/apache/lucene/index/RandomIndexWriter.java (working copy)
@@ -68,7 +68,7 @@
   /** create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT and MockAnalyzer */
   public RandomIndexWriter(Random r, Directory dir) throws IOException {
-    this(r, dir, LuceneTestCase.newIndexWriterConfig(r, LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer()));
+    this(r, dir, LuceneTestCase.newIndexWriterConfig(r, LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(r)));
   }
 
   /** create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT */
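For illustration only (not in the patch): the writeDoc overloads now thread the test's Random through so the MockAnalyzer they build can randomize payloads. DocHelper is package-private, so a call site would sit in the same package; the directory, seed, and document below are placeholders.

  package org.apache.lucene.index;

  import java.util.Random;

  import org.apache.lucene.document.Document;
  import org.apache.lucene.store.Directory;
  import org.apache.lucene.store.RAMDirectory;

  public class WriteDocSketch {
    public static void main(String[] args) throws Exception {
      Random random = new Random(13);
      Directory dir = new RAMDirectory();
      // Random is passed first; the rest of the signature is unchanged.
      SegmentInfo info = DocHelper.writeDoc(random, dir, new Document());
      System.out.println(info.name); // name of the newly flushed segment
    }
  }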
Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java
===================================================================
--- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java (revision 1091052)
+++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/ext/TestExtendableQueryParser.java (working copy)
@@ -43,7 +43,7 @@
   public QueryParser getParser(Analyzer a, Extensions extensions)
       throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = extensions == null ? new ExtendableQueryParser(
         TEST_VERSION_CURRENT, "field", a) : new ExtendableQueryParser(
         TEST_VERSION_CURRENT, "field", a, extensions);
Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java
===================================================================
--- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (revision 1091052)
+++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (working copy)
@@ -80,7 +80,7 @@
     String[] fields = { "b", "t" };
     StandardQueryParser mfqp = new StandardQueryParser();
     mfqp.setMultiFields(fields);
-    mfqp.setAnalyzer(new MockAnalyzer());
+    mfqp.setAnalyzer(new MockAnalyzer(random));
 
     Query q = mfqp.parse("one", null);
     assertEquals("b:one t:one", q.toString());
@@ -150,7 +150,7 @@
     StandardQueryParser mfqp = new StandardQueryParser();
     mfqp.setMultiFields(fields);
     mfqp.setFieldsBoost(boosts);
-    mfqp.setAnalyzer(new MockAnalyzer());
+    mfqp.setAnalyzer(new MockAnalyzer(random));
 
     // Check for simple
     Query q = mfqp.parse("one", null);
@@ -178,24 +178,24 @@
   public void testStaticMethod1() throws QueryNodeException {
     String[] fields = { "b", "t" };
     String[] queries = { "one", "two" };
-    Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer());
+    Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer(random));
     assertEquals("b:one t:two", q.toString());
 
     String[] queries2 = { "+one", "+two" };
-    q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer(random));
     assertEquals("(+b:one) (+t:two)", q.toString());
 
     String[] queries3 = { "one", "+two" };
-    q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer(random));
     assertEquals("b:one (+t:two)", q.toString());
 
     String[] queries4 = { "one +more", "+two" };
-    q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer(random));
     assertEquals("(b:one +b:more) (+t:two)", q.toString());
 
     String[] queries5 = { "blah" };
     try {
-      q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer());
+      q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -219,15 +219,15 @@
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT };
     Query q = QueryParserUtil.parse("one", fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+b:one -t:one", q.toString());
 
-    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -240,19 +240,19 @@
         BooleanClause.Occur.MUST_NOT };
     StandardQueryParser parser = new StandardQueryParser();
     parser.setMultiFields(fields);
-    parser.setAnalyzer(new MockAnalyzer());
+    parser.setAnalyzer(new MockAnalyzer(random));
 
     Query q = QueryParserUtil.parse("one", fields, flags,
-        new MockAnalyzer());// , fields, flags, new
+        new MockAnalyzer(random));// , fields, flags, new
     // MockAnalyzer());
     assertEquals("+b:one -t:one", q.toString());
 
-    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -265,13 +265,13 @@
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD };
     Query q = QueryParserUtil.parse(queries, fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+f1:one -f2:two f3:three", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
       q = QueryParserUtil
-          .parse(queries, fields, flags2, new MockAnalyzer());
+          .parse(queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -284,13 +284,13 @@
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT };
     Query q = QueryParserUtil.parse(queries, fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+b:one -t:two", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
       q = QueryParserUtil
-          .parse(queries, fields, flags2, new MockAnalyzer());
+          .parse(queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -316,7 +316,7 @@
   }
 
   public void testStopWordSearching() throws Exception {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     Directory ramDir = newDirectory();
     IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
@@ -342,7 +342,7 @@
    * Return empty tokens for field "f1".
    */
   private static final class AnalyzerReturningNull extends Analyzer {
-    MockAnalyzer stdAnalyzer = new MockAnalyzer();
+    MockAnalyzer stdAnalyzer = new MockAnalyzer(random);
 
     public AnalyzerReturningNull() {
     }
Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java
===================================================================
--- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (revision 1091052)
+++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (working copy)
@@ -191,7 +191,7 @@
   public StandardQueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(a);
@@ -281,7 +281,7 @@
   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.AND);
@@ -301,7 +301,7 @@
   }
 
   public void testConstantScoreAutoRewrite() throws Exception {
-    StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query q = qp.parse("foo*bar", "field");
     assertTrue(q instanceof WildcardQuery);
     assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery) q).getRewriteMethod());
@@ -410,9 +410,9 @@
   public void testSimple() throws Exception {
     assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
     assertQueryEquals("term term term", null, "term term term");
-    assertQueryEquals("türm term term", new MockAnalyzer(MockTokenizer.WHITESPACE, false),
+    assertQueryEquals("türm term term", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false),
         "türm term term");
-    assertQueryEquals("ümlaut", new MockAnalyzer(MockTokenizer.WHITESPACE, false), "ümlaut");
+    assertQueryEquals("ümlaut", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), "ümlaut");
 
     // FIXME: change MockAnalyzer to not extend CharTokenizer for this test
     //assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
@@ -470,7 +470,7 @@
   }
 
   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -491,7 +491,7 @@
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");
 
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -726,7 +726,7 @@
   }
 
   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     /*
      * assertQueryEquals("\\[brackets", a, "\\[brackets");
@@ -825,7 +825,7 @@
   }
 
   public void testQueryStringEscaping() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
     assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
@@ -866,7 +866,7 @@
   @Ignore("contrib queryparser shouldn't escape wildcard terms")
   public void testEscapedWildcard() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
     WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
     assertEquals(q, qp.parse("foo\\?ba?r", "field"));
@@ -904,7 +904,7 @@
   public void testBoost() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(oneStopAnalyzer);
@@ -920,7 +920,7 @@
     assertNotNull(q);
 
     StandardQueryParser qp2 = new StandardQueryParser();
-    qp2.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    qp2.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
 
     q = qp2.parse("the^3", "field");
     // "the" is a stop word so the result is an empty query:
@@ -950,7 +950,7 @@
 
   public void testCustomQueryParserWildcard() {
     try {
-      new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
+      new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
       fail("Wildcard queries should not be allowed");
     } catch (QueryNodeException expected) {
       // expected exception
@@ -959,7 +959,7 @@
 
   public void testCustomQueryParserFuzzy() throws Exception {
     try {
-      new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
+      new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
       fail("Fuzzy queries should not be allowed");
     } catch (QueryNodeException expected) {
       // expected exception
@@ -970,7 +970,7 @@
     BooleanQuery.setMaxClauseCount(2);
     try {
       StandardQueryParser qp = new StandardQueryParser();
-      qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+      qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
       qp.parse("one two three", "field");
       fail("ParseException expected due to too many boolean clauses");
@@ -984,7 +984,7 @@
    */
   public void testPrecedence() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
     Query query1 = qp.parse("A AND B OR C AND D", "field");
     Query query2 = qp.parse("+A +B +C +D", "field");
@@ -995,7 +995,7 @@
   // Todo: Convert from DateField to DateUtil
//  public void testLocalDateFormat() throws IOException, QueryNodeException {
//    Directory ramDir = newDirectory();
-//    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+//    IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
//    addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
//    addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
//    iw.close();
@@ -1116,7 +1116,7 @@
   public void testStopwords() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true));
 
     Query result = qp.parse("a:the OR a:foo", "a");
     assertNotNull("result is null and it shouldn't be", result);
@@ -1140,7 +1140,7 @@
   public void testPositionIncrement() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(
-        new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
 
     qp.setEnablePositionIncrements(true);
@@ -1161,7 +1161,7 @@
   public void testMatchAllDocs() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
 
     assertEquals(new MatchAllDocsQuery(), qp.parse("*:*", "field"));
     assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)", "field"));
@@ -1173,7 +1173,7 @@
   private void assertHits(int expected, String query, IndexSearcher is)
       throws IOException, QueryNodeException {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     qp.setLocale(Locale.ENGLISH);
 
     Query q = qp.parse(query, "date");
Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java
===================================================================
--- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java (revision 1091052)
+++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java (working copy)
@@ -41,7 +41,7 @@
       fieldName = fName;
       IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
           Version.LUCENE_CURRENT,
-          new MockAnalyzer()));
+          new MockAnalyzer(random)));
       for (int j = 0; j < docs.length; j++) {
         Document d = new Document();
         d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));
Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java
===================================================================
--- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (revision 1091052)
+++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (working copy)
@@ -34,7 +34,7 @@
 public class TestComplexPhraseQuery extends LuceneTestCase {
   Directory rd;
-  Analyzer analyzer = new MockAnalyzer();
+  Analyzer analyzer = new MockAnalyzer(random);
 
   DocData docsContent[] = { new DocData("john smith", "1"),
       new DocData("johathon smith", "2"),
Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java
===================================================================
--- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java (revision 1091052)
+++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/precedence/TestPrecedenceQueryParser.java (working copy)
@@ -125,7 +125,7 @@
   public PrecedenceQueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.OR);
@@ -171,7 +171,7 @@
   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.AND);
@@ -232,7 +232,7 @@
         "+(title:dog title:cat) -author:\"bob dole\"");
 
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
-    qp.setAnalyzer(new MockAnalyzer());
+    qp.setAnalyzer(new MockAnalyzer(random));
     // make sure OR is the default:
     assertEquals(Operator.OR, qp.getDefaultOperator());
     qp.setDefaultOperator(Operator.AND);
@@ -246,7 +246,7 @@
   }
 
   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -266,7 +266,7 @@
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");
 
-    Analyzer a = new MockAnalyzer();
+    Analyzer a = new MockAnalyzer(random);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -405,7 +405,7 @@
     final String defaultField = "default";
     final String monthField = "month";
     final String hourField = "hour";
-    PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer());
+    PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer(random));
 
     Map fieldMap = new HashMap();
     // set a field specific date resolution
@@ -467,7 +467,7 @@
   }
 
   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
 
     assertQueryEquals("a\\-b:c", a, "a-b:c");
     assertQueryEquals("a\\+b:c", a, "a+b:c");
@@ -533,7 +533,7 @@
 
   public void testBoost() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
 
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(oneStopAnalyzer);
@@ -548,7 +548,7 @@
     q = qp.parse("\"on\"^1.0", "field");
     assertNotNull(q);
 
-    q = getParser(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
+    q = getParser(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
         "field");
     assertNotNull(q);
   }
@@ -564,7 +564,7 @@
   public void testBooleanQuery() throws Exception {
     BooleanQuery.setMaxClauseCount(2);
     try {
-      getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
+      getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
       fail("ParseException expected due to too many boolean clauses");
     } catch (QueryNodeException expected) {
       // too many boolean clauses, so ParseException is expected
@@ -573,7 +573,7 @@
 
   // LUCENE-792
   public void testNOT() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("NOT foo AND bar", a, "-foo +bar");
   }
 
@@ -582,7 +582,7 @@
    * issue has been corrected.
    */
   public void testPrecedence() throws Exception {
-    PrecedenceQueryParser parser = getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    PrecedenceQueryParser parser = getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query query1 = parser.parse("A AND B OR C AND D", "field");
     Query query2 = parser.parse("(A AND B) OR (C AND D)", "field");
     assertEquals(query1, query2);
Index: lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java
===================================================================
--- lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java (revision 1091052)
+++ lucene/contrib/wordnet/src/test/org/apache/lucene/wordnet/TestWordnet.java (working copy)
@@ -63,7 +63,7 @@
   private void assertExpandsTo(String term, String expected[]) throws IOException {
     Query expandedQuery = SynExpand.expand(term, searcher, new
-        MockAnalyzer(), "field", 1F);
+        MockAnalyzer(random), "field", 1F);
     BooleanQuery expectedQuery = new BooleanQuery();
     for (String t : expected)
       expectedQuery.add(new TermQuery(new Term("field", t)),
Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java
===================================================================
--- lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (revision 1091052)
+++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (working copy)
@@ -59,7 +59,7 @@
     // make sure a Directory acts the same
     Directory d = newDirectory();
-    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     r = IndexReader.open(d, false);
     testNorms(r);
     r.close();
@@ -84,7 +84,7 @@
     // make sure a Directory acts the same
     Directory d = newDirectory();
-    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     r = IndexReader.open(d, false);
     termsEnumTest(r);
     r.close();
Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
===================================================================
--- lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (revision 1091052)
+++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (working copy)
@@ -65,7 +65,7 @@
     // create dir data
     IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); for (int i = 0; i < 20; i++) { Document document = new Document(); @@ -91,7 +91,7 @@ // create dir data IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); indexWriter.setInfoStream(VERBOSE ? System.out : null); if (VERBOSE) { System.out.println("TEST: make test index"); @@ -104,7 +104,7 @@ indexWriter.close(); // test ii writer - InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(), true); + InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(random), true); for (int i = 0; i < 500; i++) { Document document = new Document(); assembleDocument(document, i); Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java =================================================================== --- lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (revision 1091052) +++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (working copy) @@ -34,17 +34,17 @@ public void test() throws Exception { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); addDocument(iw, "Hello, world!"); addDocument(iw, "All work and no play makes jack a dull boy"); iw.close(); - iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); addDocument(iw, "Hello, tellus!"); addDocument(iw, "All work and no play makes danny a dull boy"); iw.close(); - iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); + iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); addDocument(iw, "Hello, earth!"); addDocument(iw, "All work and no play makes wendy a dull girl"); iw.close(); Index: lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java (working copy) @@ -134,7 +134,7 @@ public void testCodec() throws Exception { Directory dir = new AppendingRAMDirectory(random, new RAMDirectory()); - IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer()); + IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random)); cfg.setCodecProvider(new AppendingCodecProvider()); ((TieredMergePolicy)cfg.getMergePolicy()).setUseCompoundFile(false); Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java =================================================================== --- 
lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (working copy) @@ -32,7 +32,7 @@ public void setUp() throws Exception { super.setUp(); dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); Document doc; for (int i = 0; i < NUM_DOCS; i++) { doc = new Document(); Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (working copy) @@ -25,7 +25,7 @@ public void test() throws Exception { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); Document doc; Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (working copy) @@ -61,7 +61,7 @@ super.setUp(); store = newDirectory(); IndexWriter writer = new IndexWriter(store, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); for (int i = 0; i < NUM_DOCS; i++) { Document d = new Document(); Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java (working copy) @@ -39,7 +39,7 @@ mergePolicy.setNoCFSRatio(1); IndexWriter iw = new IndexWriter( fsDir, - new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()). + new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)). setOpenMode(OpenMode.CREATE). setMergePolicy(mergePolicy) ); Index: lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (working copy) @@ -40,7 +40,7 @@ public static void setUpClass() throws Exception { dir = newDirectory(); writer = new IndexWriter(dir, newIndexWriterConfig(random, - TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)) + TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)) .setMaxBufferedDocs(2)); writer.setInfoStream(VERBOSE ? 
System.out : null); indexDocs(writer); Index: lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (revision 1091052) +++ lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (working copy) @@ -66,7 +66,7 @@ super.setUp(); store = newDirectory(); IndexWriter writer = new IndexWriter(store, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy())); + TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); for (int i = 0; i < NUM_DOCS; i++) { Document d = new Document(); Index: lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java =================================================================== --- lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (revision 1091052) +++ lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (working copy) @@ -49,7 +49,7 @@ @BeforeClass public static void beforeClass() throws Exception { // TODO: rewrite test (this needs to set QueryParser.enablePositionIncrements, too, for work with CURRENT): - Analyzer analyzer=new MockAnalyzer(MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET, false); + Analyzer analyzer=new MockAnalyzer(random, MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET, false); //initialize the parser builder=new CorePlusExtensionsParser("contents",analyzer); Index: lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java =================================================================== --- lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (revision 1091052) +++ lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (working copy) @@ -44,7 +44,7 @@ public class TestQueryTemplateManager extends LuceneTestCase { CoreParser builder; - Analyzer analyzer=new MockAnalyzer(); + Analyzer analyzer=new MockAnalyzer(random); private IndexSearcher searcher; private Directory dir; Index: lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java =================================================================== --- lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java (revision 1091052) +++ lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java (working copy) @@ -71,7 +71,7 @@ super.setUp(); directory = newDirectory(); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))); setUpPlotter( 2, 15); Index: lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java =================================================================== --- lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java (revision 1091052) +++ lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java (working copy) @@ -47,7 +47,7 @@ public void setUp() throws Exception { super.setUp(); directory = newDirectory(); - writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + writer = new 
     addData(writer);
   }
Index: lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java
===================================================================
--- lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java (revision 1091052)
+++ lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java (working copy)
@@ -58,7 +58,7 @@
     final String TEXT = "the fox jumped";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -102,7 +102,7 @@
     final String TEXT = "the fox jumped";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -172,7 +172,7 @@
     final String TEXT = "the fox did not jump";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamSparse(),
@@ -215,7 +215,7 @@
     final String TEXT = "the fox did not jump";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, TEXT, Store.YES, Index.ANALYZED,
@@ -256,7 +256,7 @@
     final String TEXT = "the fox did not jump";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamSparse(),
Index: lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
===================================================================
--- lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java (revision 1091052)
+++ lucene/contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java (working copy)
@@ -90,7 +90,7 @@
   Directory ramDir;
   public IndexSearcher searcher = null;
   int numHighlights = 0;
-  final Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+  final Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
   TopDocs hits;

   String[] texts = {
@@ -101,7 +101,7 @@
       "wordx wordy wordz wordx wordy wordx worda wordb wordy wordc", "y z x y z a b", "lets is a the lets is a the lets is a the lets" };

   public void testQueryScorerHits() throws Exception {
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
     query = qp.parse("\"very long\"");
     searcher = new IndexSearcher(ramDir, true);
@@ -133,7 +133,7 @@

     String s1 = "I call our world Flatland, not because we call it so,";

-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));

     // Verify that a query against the default field results in text being
     // highlighted
@@ -165,7 +165,7 @@
    */
   private static String highlightField(Query query, String fieldName, String text)
       throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
+    TokenStream tokenStream = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
     // Assuming "<B>", "</B>" used to highlight
     SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
     QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -210,7 +210,7 @@
     String f2c = f2 + ":";
     String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2 + " OR " + f2c + ph2 + ")";
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
     Query query = qp.parse(q);
@@ -1134,13 +1134,13 @@
       sb.append("stoppedtoken");
     }
     SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-    Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
+    Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
        "data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
    // new
    // QueryTermScorer(query));
     hg.setTextFragmenter(new NullFragmenter());
     hg.setMaxDocCharsToAnalyze(100);
-    match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+    match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
     assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
         .getMaxDocCharsToAnalyze());
@@ -1151,7 +1151,7 @@
     // + whitespace)
     sb.append(" ");
     sb.append(goodWord);
-    match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+    match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
     assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
         .getMaxDocCharsToAnalyze());
   }
@@ -1170,10 +1170,10 @@
     String text = "this is a text with searchterm in it";
     SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-    Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
+    Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
     hg.setTextFragmenter(new NullFragmenter());
     hg.setMaxDocCharsToAnalyze(36);
-    String match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
+    String match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
     assertTrue(
         "Matched text should contain remainder of text after highlighted query ",
         match.endsWith("in it"));
@@ -1191,7 +1191,7 @@
     // test to show how rewritten query can still be used
     if (searcher != null) searcher.close();
     searcher = new IndexSearcher(ramDir, true);
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);

     QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
     Query query = parser.parse("JF? or Kenned*");
@@ -1446,64 +1446,64 @@
     Highlighter highlighter;
     String result;

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("Hi-Speed10 <B>foo</B>", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("Hi-Speed<B>10</B> foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("<B>Hi</B>-Speed10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("Hi-<B>Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);

     // ///////////////// same tests, just put the bigger overlapping token
     // first

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("Hi-Speed10 <B>foo</B>", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("Hi-Speed<B>10</B> foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("<B>Hi</B>-Speed10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("Hi-<B>Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);
@@ -1514,7 +1514,7 @@
   }

   private Directory dir;
-  private Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+  private Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);

   public void testWeightedTermsWithDeletes() throws IOException, ParseException, InvalidTokenOffsetsException {
     makeIndex();
@@ -1529,7 +1529,7 @@
   }

   private void makeIndex() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
     writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1539,7 +1539,7 @@
   }

   private void deleteDocument() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
     writer.deleteDocuments( new Term( "t_text1", "del" ) );
     // To see negative idf, keep comment the following line
     //writer.optimize();
@@ -1644,7 +1644,7 @@
     dir = newDirectory();
     ramDir = newDirectory();
     IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
     for (String text : texts) {
       addDoc(writer, text);
     }
Index: lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java
===================================================================
--- lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java (revision 1091052)
+++ lucene/contrib/highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java (working copy)
@@ -87,9 +87,9 @@
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    analyzerW = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    analyzerW = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     analyzerB = new BigramAnalyzer();
-    analyzerK = new MockAnalyzer(MockTokenizer.KEYWORD, false);
+    analyzerK = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
     paW = new QueryParser(TEST_VERSION_CURRENT, F, analyzerW );
     paB = new QueryParser(TEST_VERSION_CURRENT, F, analyzerB );
     dir = newDirectory();
Index: lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java
===================================================================
--- lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java (revision 1091052)
+++ lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestDirectSpellChecker.java (working copy)
@@ -35,7 +35,7 @@
     spellChecker.setMinQueryLength(0);
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        new MockAnalyzer(MockTokenizer.SIMPLE, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true));

     for (int i = 0; i < 20; i++) {
       Document doc = new Document();
@@ -93,7 +93,7 @@
   public void testOptions() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        new MockAnalyzer(MockTokenizer.SIMPLE, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
     Document doc = new Document();
     doc.add(newField("text", "foobar", Field.Store.NO, Field.Index.ANALYZED));
Index: lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java
===================================================================
--- lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java (revision 1091052)
+++ lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java (working copy)
@@ -46,7 +46,7 @@
   public void setUp() throws Exception {
     super.setUp();
     store = newDirectory();
-    IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));

     Document doc;
Index: lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java
===================================================================
--- lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (revision 1091052)
+++ lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (working copy)
@@ -54,7 +54,7 @@
     //create a user index
     userindex = newDirectory();
     IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     for (int i = 0; i < 1000; i++) {
       Document doc = new Document();
Index: lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java
===================================================================
--- lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (revision 1091052)
+++ lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (working copy)
@@ -143,9 +143,9 @@
    */
   private Analyzer randomAnalyzer() {
     switch(random.nextInt(3)) {
-      case 0: return new MockAnalyzer(MockTokenizer.SIMPLE, true);
-      case 1: return new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
-      default: return new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+      case 0: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
+      case 1: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+      default: return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     }
   }
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (revision 1091052)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (working copy)
@@ -74,7 +74,7 @@
     Map originalValues = getOriginalValues();

     MoreLikeThis mlt = new MoreLikeThis(reader);
-    mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
    mlt.setMinDocFreq(1);
     mlt.setMinTermFreq(1);
     mlt.setMinWordLen(1);
@@ -109,7 +109,7 @@
   private Map getOriginalValues() throws IOException {
     Map originalValues = new HashMap();
     MoreLikeThis mlt = new MoreLikeThis(reader);
-    mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     mlt.setMinDocFreq(1);
     mlt.setMinTermFreq(1);
     mlt.setMinWordLen(1);
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (revision 1091052)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (working copy)
@@ -43,7 +43,7 @@
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     //Add series of docs with filterable fields : url, text and dates flags
     addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (revision 1091052)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (working copy)
@@ -39,7 +39,7 @@
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));

     //Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
     addDoc(writer, "admin guest", "010", "20040101","Y");
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (revision 1091052)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (working copy)
@@ -56,7 +56,7 @@
   public void testSpanRegex() throws Exception {
     Directory directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     // doc.add(newField("field", "the quick brown fox jumps over the lazy dog",
     // Field.Store.NO, Field.Index.ANALYZED));
@@ -97,14 +97,14 @@
     // creating first index writer
     IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     writerA.addDocument(lDoc);
     writerA.optimize();
     writerA.close();

     // creating second index writer
     IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     writerB.addDocument(lDoc2);
     writerB.optimize();
     writerB.close();
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (revision 1091052)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (working copy)
@@ -34,13 +34,13 @@
   private Directory directory;
   private IndexSearcher searcher;
   private IndexReader reader;
-  private Analyzer analyzer=new MockAnalyzer();
+  private Analyzer analyzer=new MockAnalyzer(random);

   @Override
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     //Add series of docs with misspelt names
     addDoc(writer, "jonathon smythe","1");
@@ -121,7 +121,7 @@
   }

   public void testFuzzyLikeThisQueryEquals() {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     FuzzyLikeThisQuery fltq1 = new FuzzyLikeThisQuery(10, analyzer);
     fltq1.addTerms("javi", "subject", 0.5f, 2);
     FuzzyLikeThisQuery fltq2 = new FuzzyLikeThisQuery(10, analyzer);
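Note for reviewers (not part of the patch): every hunk above applies the same mechanical change, threading the test's random seed into the MockAnalyzer constructor so that analyzer behavior can vary from run to run while remaining reproducible from the seed of a failing run. A minimal sketch of the resulting usage pattern, assuming a test that extends LuceneTestCase (the base class exposing the `random` field referenced throughout these hunks); the class and method names below are illustrative only:

    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.analysis.MockTokenizer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    // Illustrative test class, not part of this patch.
    public class TestSeededMockAnalyzer extends LuceneTestCase {
      public void testSeededAnalyzer() throws Exception {
        Directory dir = newDirectory();
        // The analyzer is seeded from the test's Random, matching the
        // pattern applied in every hunk above: a failure can be replayed
        // by re-running the test with the same seed.
        RandomIndexWriter writer = new RandomIndexWriter(random, dir,
            new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
        Document doc = new Document();
        doc.add(newField("field", "some test text", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
        writer.close();
        dir.close();
      }
    }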