Index: lucene/backwards/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (working copy) @@ -60,18 +60,18 @@ String[] fields = {"b", "t"}; Occur occur[] = {Occur.SHOULD, Occur.SHOULD}; TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer(); - MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, a); + MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, a); Query q = mfqp.parse(qtxt); assertEquals(expectedRes, q.toString()); - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, qtxt, fields, occur, a); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, qtxt, fields, occur, a); assertEquals(expectedRes, q.toString()); } public void testSimple() throws Exception { String[] fields = {"b", "t"}; - MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); Query q = mfqp.parse("one"); assertEquals("b:one t:one", q.toString()); @@ -134,7 +134,7 @@ boosts.put("b", Float.valueOf(5)); boosts.put("t", Float.valueOf(10)); String[] fields = {"b", "t"}; - MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), boosts); + MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT), boosts); //Check for simple @@ -160,24 +160,24 @@ public void testStaticMethod1() throws ParseException { String[] fields = {"b", "t"}; String[] queries = {"one", "two"}; - Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("b:one t:two", q.toString()); String[] queries2 = {"+one", "+two"}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries2, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries2, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("(+b:one) (+t:two)", q.toString()); String[] queries3 = {"one", "+two"}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries3, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries3, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("b:one (+t:two)", q.toString()); String[] queries4 = {"one +more", "+two"}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries4, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries4, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("(b:one +b:more) (+t:two)", q.toString()); String[] queries5 = {"blah"}; try { - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries5, fields, new 
StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries5, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); fail(); } catch(IllegalArgumentException e) { // expected exception, array length differs @@ -187,11 +187,11 @@ TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer(); String[] queries6 = {"((+stop))", "+((stop))"}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries6, fields, stopA); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries6, fields, stopA); assertEquals("", q.toString()); String[] queries7 = {"one ((+stop)) +more", "+((stop)) +two"}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries7, fields, stopA); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries7, fields, stopA); assertEquals("(b:one +b:more) (+t:two)", q.toString()); } @@ -199,15 +199,15 @@ public void testStaticMethod2() throws ParseException { String[] fields = {"b", "t"}; BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT}; - Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("+b:one -t:one", q.toString()); - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("+(b:one b:two) -(t:one t:two)", q.toString()); try { BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT)); fail(); } catch(IllegalArgumentException e) { // expected exception, array length differs @@ -218,17 +218,17 @@ String[] fields = {"b", "t"}; //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD}; BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT}; - MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT)); - Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));//, fields, flags, new StandardAnalyzer()); + Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));//, fields, flags, new StandardAnalyzer()); assertEquals("+b:one -t:one", q.toString()); - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("+(b:one b:two) -(t:one t:two)", q.toString()); try { BooleanClause.Occur[] flags2 = 
{BooleanClause.Occur.MUST}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT)); fail(); } catch(IllegalArgumentException e) { // expected exception, array length differs @@ -240,12 +240,12 @@ String[] fields = {"f1", "f2", "f3"}; BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD}; - Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("+f1:one -f2:two f3:three", q.toString()); try { BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT)); fail(); } catch(IllegalArgumentException e) { // expected exception, array length differs @@ -256,12 +256,12 @@ String[] queries = {"one", "two"}; String[] fields = {"b", "t"}; BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT}; - Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT)); assertEquals("+b:one -t:two", q.toString()); try { BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST}; - q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT)); fail(); } catch(IllegalArgumentException e) { // expected exception, array length differs @@ -270,7 +270,7 @@ public void testAnalyzerReturningNull() throws ParseException { String[] fields = new String[] { "f1", "f2", "f3" }; - MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new AnalyzerReturningNull()); + MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new AnalyzerReturningNull()); Query q = parser.parse("bla AND blo"); assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString()); // the following queries are not affected as their terms are not analyzed anyway: @@ -283,7 +283,7 @@ } public void testStopWordSearching() throws Exception { - Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); Directory ramDir = new RAMDirectory(); IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED); Document doc = new Document(); @@ -292,7 +292,7 @@ iw.close(); MultiFieldQueryParser mfqp = - new MultiFieldQueryParser(Version.LUCENE_CURRENT, new String[] {"body"}, analyzer); + new MultiFieldQueryParser(TEST_VERSION_CURRENT, new String[] {"body"}, analyzer); mfqp.setDefaultOperator(QueryParser.Operator.AND); Query q = 
mfqp.parse("the footest"); IndexSearcher is = new IndexSearcher(ramDir, true); @@ -305,7 +305,7 @@ * Return empty tokens for field "f1". */ private static class AnalyzerReturningNull extends Analyzer { - StandardAnalyzer stdAnalyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + StandardAnalyzer stdAnalyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); public AnalyzerReturningNull() { } Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestStandardAnalyzer.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/analysis/TestStandardAnalyzer.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/analysis/TestStandardAnalyzer.java (working copy) @@ -27,16 +27,16 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase { - private Analyzer a = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + private Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT); public void testMaxTermLength() throws Exception { - StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT); sa.setMaxTokenLength(5); assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "xy", "z"}); } public void testMaxTermLength2() throws Exception { - StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT); assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "toolong", "xy", "z"}); sa.setMaxTokenLength(5); @@ -100,7 +100,7 @@ public void testLucene1140() throws Exception { try { - StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); assertAnalyzesTo(analyzer, "www.nutch.org.", new String[]{ "www.nutch.org" }, new String[] { "" }); } catch (NullPointerException e) { fail("Should not throw an NPE and it did"); @@ -110,7 +110,7 @@ public void testDomainNames() throws Exception { // Current lucene should not show the bug - StandardAnalyzer a2 = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + StandardAnalyzer a2 = new StandardAnalyzer(TEST_VERSION_CURRENT); // domain names assertAnalyzesTo(a2, "www.nutch.org", new String[]{"www.nutch.org"}); Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestAnalyzers.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/analysis/TestAnalyzers.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/analysis/TestAnalyzers.java (working copy) @@ -75,7 +75,7 @@ } public void testStop() throws Exception { - Analyzer a = new StopAnalyzer(Version.LUCENE_CURRENT); + Analyzer a = new StopAnalyzer(TEST_VERSION_CURRENT); assertAnalyzesTo(a, "foo bar FOO BAR", new String[] { "foo", "bar", "foo", "bar" }); assertAnalyzesTo(a, "foo a bar such FOO THESE BAR", @@ -123,7 +123,7 @@ /* StandardAnalyzer was made final in 3.1: private static class MyStandardAnalyzer extends StandardAnalyzer { public MyStandardAnalyzer() { - super(org.apache.lucene.util.Version.LUCENE_CURRENT); + super(org.apache.lucene.util.TEST_VERSION_CURRENT); } @Override Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestTeeSinkTokenFilter.java =================================================================== --- 
lucene/backwards/src/test/org/apache/lucene/analysis/TestTeeSinkTokenFilter.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/analysis/TestTeeSinkTokenFilter.java (working copy) @@ -171,10 +171,10 @@ buffer.append(English.intToEnglish(i).toUpperCase()).append(' '); } //make sure we produce the same tokens - TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString())))); + TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString())))); TokenStream sink = teeStream.newSinkTokenStream(new ModuloSinkFilter(100)); teeStream.consumeAllTokens(); - TokenStream stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))), 100); + TokenStream stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))), 100); TermAttribute tfTok = stream.addAttribute(TermAttribute.class); TermAttribute sinkTok = sink.addAttribute(TermAttribute.class); for (int i=0; stream.incrementToken(); i++) { @@ -187,12 +187,12 @@ int tfPos = 0; long start = System.currentTimeMillis(); for (int i = 0; i < 20; i++) { - stream = new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))); + stream = new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))); PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); while (stream.incrementToken()) { tfPos += posIncrAtt.getPositionIncrement(); } - stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))), modCounts[j]); + stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))), modCounts[j]); posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); while (stream.incrementToken()) { tfPos += posIncrAtt.getPositionIncrement(); @@ -204,7 +204,7 @@ //simulate one field with one sink start = System.currentTimeMillis(); for (int i = 0; i < 20; i++) { - teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString())))); + teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString())))); sink = teeStream.newSinkTokenStream(new ModuloSinkFilter(modCounts[j])); PositionIncrementAttribute posIncrAtt = teeStream.getAttribute(PositionIncrementAttribute.class); while (teeStream.incrementToken()) { Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (working copy) @@ -60,7 +60,7 @@ PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer()); analyzer.addAnalyzer("partnum", new KeywordAnalyzer()); - QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, "description", analyzer); + QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, "description", analyzer); Query query = queryParser.parse("partnum:Q36 AND 
SPACE"); ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestStopAnalyzer.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/analysis/TestStopAnalyzer.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/analysis/TestStopAnalyzer.java (working copy) @@ -29,7 +29,7 @@ public class TestStopAnalyzer extends BaseTokenStreamTestCase { - private StopAnalyzer stop = new StopAnalyzer(Version.LUCENE_CURRENT); + private StopAnalyzer stop = new StopAnalyzer(TEST_VERSION_CURRENT); private Set inValidTokens = new HashSet(); public TestStopAnalyzer(String s) { @@ -82,7 +82,7 @@ stopWordsSet.add("good"); stopWordsSet.add("test"); stopWordsSet.add("analyzer"); - StopAnalyzer newStop = new StopAnalyzer(Version.LUCENE_CURRENT, stopWordsSet); + StopAnalyzer newStop = new StopAnalyzer(TEST_VERSION_CURRENT, stopWordsSet); StringReader reader = new StringReader("This is a good test of the english stop analyzer with positions"); int expectedIncr[] = { 1, 1, 1, 3, 1, 1, 1, 2, 1}; TokenStream stream = newStop.tokenStream("test", reader); Index: lucene/backwards/src/test/org/apache/lucene/TestDemo.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/TestDemo.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/TestDemo.java (working copy) @@ -45,7 +45,7 @@ public void testDemo() throws IOException, ParseException { - Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT); + Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); // Store the index in memory: Directory directory = new RAMDirectory(); @@ -63,7 +63,7 @@ // Now search the index: IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true // Parse a simple query that searches for "text": - QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fieldname", analyzer); + QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fieldname", analyzer); Query query = parser.parse("text"); ScoreDoc[] hits = isearcher.search(query, null, 1000).scoreDocs; assertEquals(1, hits.length); Index: lucene/backwards/src/test/org/apache/lucene/search/TestNot.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestNot.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestNot.java (working copy) @@ -49,7 +49,7 @@ writer.close(); Searcher searcher = new IndexSearcher(store, true); - QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer()); + QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "field", new SimpleAnalyzer()); Query query = parser.parse("a NOT b"); //System.out.println(query); ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; Index: lucene/backwards/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java (working copy) @@ -89,7 +89,7 @@ for (int i = 1; i < docText.length; i++) { qtxt += ' ' + docText[i]; // large query so that search will be longer } - QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, FIELD_NAME, new 
WhitespaceAnalyzer()); + QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new WhitespaceAnalyzer()); query = queryParser.parse(qtxt); // warm the searcher Index: lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (working copy) @@ -32,7 +32,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { public void testCachingWorks() throws Exception { Directory dir = new RAMDirectory(); - IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); writer.close(); IndexReader reader = IndexReader.open(dir, true); @@ -71,7 +71,7 @@ public void testIsCacheAble() throws Exception { Directory dir = new RAMDirectory(); - IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); writer.close(); IndexReader reader = IndexReader.open(dir, true); Index: lucene/backwards/src/test/org/apache/lucene/search/TestPhraseQuery.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestPhraseQuery.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestPhraseQuery.java (working copy) @@ -362,8 +362,8 @@ } public void testToString() throws Exception { - StopAnalyzer analyzer = new StopAnalyzer(Version.LUCENE_CURRENT); - QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", analyzer); + StopAnalyzer analyzer = new StopAnalyzer(TEST_VERSION_CURRENT); + QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer); qp.setEnablePositionIncrements(true); PhraseQuery q = (PhraseQuery)qp.parse("\"this hi this is a test is\""); assertEquals("field:\"? hi ? ? ? test\"", q.toString()); Index: lucene/backwards/src/test/org/apache/lucene/search/TestPositionIncrement.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestPositionIncrement.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestPositionIncrement.java (working copy) @@ -191,7 +191,7 @@ assertEquals(0, hits.length); // should not find "1 2" because there is a gap of 1 in the index - QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", + QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new StopWhitespaceAnalyzer(false)); q = (PhraseQuery) qp.parse("\"1 2\""); hits = searcher.search(q, null, 1000).scoreDocs; @@ -215,7 +215,7 @@ assertEquals(0, hits.length); // when both qp qnd stopFilter propagate increments, we should find the doc. 
- qp = new QueryParser(Version.LUCENE_CURRENT, "field", + qp = new QueryParser(TEST_VERSION_CURRENT, "field", new StopWhitespaceAnalyzer(true)); qp.setEnablePositionIncrements(true); q = (PhraseQuery) qp.parse("\"1 stop 2\""); Index: lucene/backwards/src/test/org/apache/lucene/search/TestBooleanOr.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestBooleanOr.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestBooleanOr.java (working copy) @@ -135,7 +135,7 @@ RAMDirectory rd = new RAMDirectory(); // - IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); // Document d = new Document(); Index: lucene/backwards/src/test/org/apache/lucene/search/TestDateSort.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestDateSort.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestDateSort.java (working copy) @@ -76,7 +76,7 @@ Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.STRING, true)); - QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer()); + QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer()); Query query = queryParser.parse("Document"); // Execute the search and process the search results. Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java (working copy) @@ -37,7 +37,7 @@ public static final String FIELD = "field"; public static final QueryParser qp = - new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer()); + new QueryParser(TEST_VERSION_CURRENT, FIELD, new WhitespaceAnalyzer()); @Override public void tearDown() throws Exception { Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpans.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpans.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpans.java (working copy) @@ -452,7 +452,7 @@ // LUCENE-1404 public void testNPESpanQuery() throws Throwable { final Directory dir = new MockRAMDirectory(); - final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT, Collections.emptySet()), IndexWriter.MaxFieldLength.LIMITED); + final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), IndexWriter.MaxFieldLength.LIMITED); // Add documents addDoc(writer, "1", "the big dogs went running to the market"); Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java 
(working copy) @@ -56,7 +56,7 @@ // create test index mDirectory = new RAMDirectory(); - final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); addDocument(writer, "1", "I think it should work."); addDocument(writer, "2", "I think it should work."); addDocument(writer, "3", "I think it should work."); Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced2.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced2.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced2.java (working copy) @@ -40,7 +40,7 @@ super.setUp(); // create test index - final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED); + final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED); addDocument(writer, "A", "Should we, could we, would we?"); addDocument(writer, "B", "It should. Should it?"); addDocument(writer, "C", "It shouldn't."); Index: lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcher.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcher.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcher.java (working copy) @@ -83,9 +83,9 @@ lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED)); // creating an index writer for the first index - IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); // creating an index writer for the second index, but writing nothing - IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); //-------------------------------------------------------------------- // scenario 1 @@ -102,7 +102,7 @@ writerB.close(); // creating the query - QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fulltext", new StandardAnalyzer(TEST_VERSION_CURRENT)); Query query = parser.parse("handle:1"); // building the searchables @@ -129,7 +129,7 @@ //-------------------------------------------------------------------- // adding one document to the empty index - writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED); + writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED); writerB.addDocument(lDoc); writerB.optimize(); 
writerB.close(); @@ -175,7 +175,7 @@ readerB.close(); // optimizing the index with the writer - writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED); + writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED); writerB.optimize(); writerB.close(); Index: lucene/backwards/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (working copy) @@ -33,7 +33,7 @@ public void testBasic() throws Exception { Directory dir = new RAMDirectory(); - IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, + IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); Document doc = new Document(); doc.add(new Field("field", "value", Store.NO, Index.ANALYZED)); Index: lucene/backwards/src/test/org/apache/lucene/search/TestSimpleExplanations.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestSimpleExplanations.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestSimpleExplanations.java (working copy) @@ -317,8 +317,8 @@ Document lDoc3 = new Document(); lDoc3.add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED)); - IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); - IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); writerA.addDocument(lDoc); writerA.addDocument(lDoc2); @@ -328,7 +328,7 @@ writerB.addDocument(lDoc3); writerB.close(); - QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT)); + QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fulltext", new StandardAnalyzer(TEST_VERSION_CURRENT)); Query query = parser.parse("handle:1"); Searcher[] searchers = new Searcher[2]; Index: lucene/backwards/src/test/org/apache/lucene/search/TestWildcard.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestWildcard.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestWildcard.java (working copy) @@ -240,7 +240,7 @@ public void testParsingAndSearching() throws Exception { String field = "content"; boolean dbg = false; - QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer()); + QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, field, new WhitespaceAnalyzer()); qp.setAllowLeadingWildcard(true); String docs[] = { "\\ abcdefg1", Index: lucene/backwards/src/test/org/apache/lucene/search/TestBoolean2.java 
=================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestBoolean2.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestBoolean2.java (working copy) @@ -107,7 +107,7 @@ }; public Query makeQuery(String queryText) throws ParseException { - Query q = (new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer())).parse(queryText); + Query q = (new QueryParser(TEST_VERSION_CURRENT, field, new WhitespaceAnalyzer())).parse(queryText); return q; } Index: lucene/backwards/src/test/org/apache/lucene/search/function/FunctionTestSetup.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/function/FunctionTestSetup.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/function/FunctionTestSetup.java (working copy) @@ -88,7 +88,7 @@ // prepare a small index with just a few documents. super.setUp(); dir = new RAMDirectory(); - anlzr = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + anlzr = new StandardAnalyzer(TEST_VERSION_CURRENT); IndexWriter iw = new IndexWriter(dir, anlzr, IndexWriter.MaxFieldLength.LIMITED); // add docs not exactly in natural ID order, to verify we do check the order of docs by scores Index: lucene/backwards/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java (working copy) @@ -160,7 +160,7 @@ float boost = (float) dboost; IndexSearcher s = new IndexSearcher(dir, true); FieldScoreQuery qValSrc = new FieldScoreQuery(field,tp); // a query that would score by the field - QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD,anlzr); + QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD,anlzr); String qtxt = "first aid text"; // from the doc texts in FunctionQuerySetup. // regular (boolean) query. 
Index: lucene/backwards/src/test/org/apache/lucene/search/TestExplanations.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestExplanations.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestExplanations.java (working copy) @@ -52,7 +52,7 @@ public static final String KEY = "KEY"; public static final String FIELD = "field"; public static final QueryParser qp = - new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer()); + new QueryParser(TEST_VERSION_CURRENT, FIELD, new WhitespaceAnalyzer()); @Override public void tearDown() throws Exception { Index: lucene/backwards/src/test/org/apache/lucene/search/TestFuzzyQuery.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestFuzzyQuery.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestFuzzyQuery.java (working copy) @@ -309,7 +309,7 @@ public void testGiga() throws Exception { - StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT); + StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); Directory index = new MockRAMDirectory(); IndexWriter w = new IndexWriter(index, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED); @@ -334,7 +334,7 @@ IndexReader r = w.getReader(); w.close(); - Query q = new QueryParser(Version.LUCENE_CURRENT, "field", analyzer).parse( "giga~0.9" ); + Query q = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer).parse( "giga~0.9" ); // 3. search IndexSearcher searcher = new IndexSearcher(r); Index: lucene/backwards/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java (working copy) @@ -36,7 +36,7 @@ * */ public class TestMatchAllDocsQuery extends LuceneTestCase { - private Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT); + private Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT); public void testQuery() throws Exception { @@ -100,7 +100,7 @@ assertEquals(2, hits.length); // test parsable toString() - QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "key", analyzer); + QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "key", analyzer); hits = is.search(qp.parse(new MatchAllDocsQuery().toString()), null, 1000).scoreDocs; assertEquals(2, hits.length); Index: lucene/backwards/src/test/org/apache/lucene/search/TestCustomSearcherSort.java =================================================================== --- lucene/backwards/src/test/org/apache/lucene/search/TestCustomSearcherSort.java (revision 965596) +++ lucene/backwards/src/test/org/apache/lucene/search/TestCustomSearcherSort.java (working copy) @@ -70,7 +70,7 @@ private Directory getIndex() throws IOException { RAMDirectory indexStore = new RAMDirectory (); - IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); + IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED); RandomGen random = new RandomGen(newRandom()); for (int i=0; i
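The change applied throughout this patch is mechanical: each test stops hard-coding org.apache.lucene.util.Version.LUCENE_CURRENT and instead references the TEST_VERSION_CURRENT constant that the test classes inherit from the shared test base class (the unqualified references above suggest it is a static Version field available via LuceneTestCase / BaseTokenStreamTestCase). The sketch below is not part of the patch; it is a minimal, hypothetical test written in the migrated style under that assumption, with illustrative names only (TestVersionExample, the "fieldname" field, the sample text).

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

/** Hypothetical example, not part of the patch: shows the migrated style only. */
public class TestVersionExample extends LuceneTestCase {

  public void testParseWithTestVersion() throws IOException, ParseException {
    // TEST_VERSION_CURRENT is assumed to be the Version constant inherited
    // from LuceneTestCase, replacing a hard-coded Version.LUCENE_CURRENT.
    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);

    // Index a single document in memory.
    Directory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, analyzer, true,
        IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("fieldname", "some stored text", Field.Store.YES,
        Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.close();

    // Parse and run a query using the same test-wide version constant.
    IndexSearcher searcher = new IndexSearcher(directory, true); // read-only
    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fieldname", analyzer);
    Query query = parser.parse("text");
    assertEquals(1, searcher.search(query, null, 1000).scoreDocs.length);
    searcher.close();
  }
}

The point of the indirection is that the backwards tests then track whichever version the test infrastructure is configured with, rather than having to be re-edited in every file each time the current version constant changes.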