Index: solr/src/test/org/apache/solr/search/TestSort.java
===================================================================
--- solr/src/test/org/apache/solr/search/TestSort.java (revision 984759)
+++ solr/src/test/org/apache/solr/search/TestSort.java (working copy)
@@ -48,7 +48,8 @@
 }
 public void testSort() throws Exception {
- MockRAMDirectory dir = new MockRAMDirectory();
+ Random random = newRandom();
+ MockRAMDirectory dir = newDirectory(random);
 Document smallDoc = new Document();
 // Field id = new Field("id","0", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);
 Field f = new Field("f","0", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS);
@@ -179,6 +180,8 @@
 assertEquals(id, collectedDocs.get(j).doc);
 }
 }
+ searcher.close();
+ dir.close();
 }
 }
Index: modules/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java
===================================================================
--- modules/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java (revision 984759)
+++ modules/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java (working copy)
@@ -40,6 +40,7 @@
 import org.apache.lucene.util.English;
 import java.io.IOException;
 import java.io.StringReader;
+import java.util.Random;
 /**
@@ -92,9 +93,10 @@
 // TODO: instead of testing it this way, we can test
 // with BaseTokenStreamTestCase now...
 public void testEndOffsetPositionWithTeeSinkTokenFilter() throws Exception {
- MockRAMDirectory dir = new MockRAMDirectory();
+ Random random = newRandom();
+ MockRAMDirectory dir = newDirectory(random);
 Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
+ IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer));
 Document doc = new Document();
 TeeSinkTokenFilter tee = new TeeSinkTokenFilter(analyzer.tokenStream("field", new StringReader("abcd ")));
 TokenStream sink = tee.newSinkTokenStream();
Index: lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
===================================================================
--- lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (revision 984759)
+++ lucene/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (working copy)
@@ -17,10 +17,13 @@
 * limitations under the License.
*/ import java.io.IOException; +import java.util.Random; + import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LogMergePolicy; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.ConcurrentMergeScheduler; import org.apache.lucene.analysis.MockAnalyzer; @@ -80,17 +83,20 @@ } public void testSubclassConcurrentMergeScheduler() throws IOException { - MockRAMDirectory dir = new MockRAMDirectory(); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); dir.failOn(new FailOnlyOnMerge()); Document doc = new Document(); Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED); doc.add(idField); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new MyMergeScheduler()) .setMaxBufferedDocs(2).setRAMBufferSizeMB( IndexWriterConfig.DISABLE_AUTO_FLUSH)); + LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy(); + logMP.setMergeFactor(10); for(int i=0;i<20;i++) writer.addDocument(doc); Index: lucene/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java =================================================================== --- lucene/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (revision 984759) +++ lucene/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (working copy) @@ -21,6 +21,7 @@ import java.io.Reader; import java.util.HashMap; import java.util.Map; +import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; @@ -283,8 +284,9 @@ public void testStopWordSearching() throws Exception { Analyzer analyzer = new MockAnalyzer(); - Directory ramDir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + Random random = newRandom(); + Directory ramDir = newDirectory(random); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer)); Document doc = new Document(); doc.add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED)); iw.addDocument(doc); @@ -298,6 +300,7 @@ ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs; assertEquals(1, hits.length); is.close(); + ramDir.close(); } /** Index: lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java =================================================================== --- lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java (revision 984759) +++ lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java (working copy) @@ -27,6 +27,7 @@ import java.util.GregorianCalendar; import java.util.HashSet; import java.util.Locale; +import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -569,9 +570,9 @@ } public void testFarsiRangeCollating() throws Exception { - - MockRAMDirectory ramDir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + Random random = newRandom(); + MockRAMDirectory ramDir = newDirectory(random); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, 
new MockAnalyzer(MockTokenizer.WHITESPACE, false))); Document doc = new Document(); doc.add(new Field("content","\u0633\u0627\u0628", Field.Store.YES, Field.Index.NOT_ANALYZED)); @@ -610,6 +611,7 @@ assertEquals("The index Term should be included.", 1, result.length); is.close(); + ramDir.close(); } private String escapeDateString(String s) { @@ -978,8 +980,9 @@ } public void testLocalDateFormat() throws IOException, ParseException { - MockRAMDirectory ramDir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + Random random = newRandom(); + MockRAMDirectory ramDir = newDirectory(random); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw); addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw); iw.close(); @@ -991,6 +994,7 @@ assertHits(1, "{12/1/2005 TO 12/4/2005}", is); assertHits(0, "{12/3/2005 TO 12/4/2005}", is); is.close(); + ramDir.close(); } public void testStarParsing() throws Exception { @@ -1124,9 +1128,10 @@ // enableStopPositionIncr & QueryParser's enablePosIncr // "match" public void testPositionIncrements() throws Exception { - Directory dir = new MockRAMDirectory(); + Random random = newRandom(); + Directory dir = newDirectory(random); Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, a)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, a)); Document doc = new Document(); doc.add(new Field("f", "the wizard of ozzy", Field.Store.NO, Field.Index.ANALYZED)); w.addDocument(doc); Index: lucene/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java =================================================================== --- lucene/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java (revision 984759) +++ lucene/src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java (working copy) @@ -19,6 +19,7 @@ import java.io.IOException; +import java.util.Random; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -26,20 +27,19 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.TermVector; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.BytesRef; public class TestCachingTokenFilter extends BaseTokenStreamTestCase { private String[] tokens = new String[] {"term1", "term2", "term3", "term2"}; public void testCaching() throws IOException { - Directory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + Directory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); Document doc = new Document(); TokenStream stream = new TokenStream() { private int index = 0; @@ -103,6 +103,7 @@ // 3) reset stream and consume tokens again stream.reset(); checkTokens(stream); + dir.close(); } private void checkTokens(TokenStream stream) 
throws IOException { Index: lucene/src/test/org/apache/lucene/TestExternalCodecs.java =================================================================== --- lucene/src/test/org/apache/lucene/TestExternalCodecs.java (revision 984759) +++ lucene/src/test/org/apache/lucene/TestExternalCodecs.java (working copy) @@ -600,10 +600,10 @@ public void testPerFieldCodec() throws Exception { final int NUM_DOCS = 173; - - Directory dir = new MockRAMDirectory(); + Random random = newRandom(); + Directory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, - new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(new MyCodecs())); + newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(new MyCodecs())); w.setMergeFactor(3); Document doc = new Document(); Index: lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestFuzzyQuery2.java (working copy) @@ -86,7 +86,7 @@ int bits = Integer.parseInt(reader.readLine()); int terms = (int) Math.pow(2, bits); - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, dir, new MockAnalyzer(MockTokenizer.KEYWORD, false)); Document doc = new Document(); Index: lucene/src/test/org/apache/lucene/search/TestNot.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestNot.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestNot.java (working copy) @@ -17,6 +17,8 @@ * limitations under the License. 
*/ +import java.util.Random; + import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.index.IndexReader; @@ -38,8 +40,9 @@ } public void testNot() throws Exception { - MockRAMDirectory store = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), store); + Random random = newRandom(); + MockRAMDirectory store = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, store); Document d1 = new Document(); d1.add(new Field("field", "a b", Field.Store.YES, Field.Index.ANALYZED)); Index: lucene/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java (working copy) @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.BitSet; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -76,8 +77,9 @@ "blueberry strudel", "blueberry pizza", }; - directory = new MockRAMDirectory(); - RandomIndexWriter iw = new RandomIndexWriter(newRandom(), directory); + Random random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter iw = new RandomIndexWriter(random, directory); for (int i=0; i dirs = new ArrayList(); + + @Override + protected void tearDown() throws Exception { + full.reader.close(); + searchX.reader.close(); + searchY.reader.close(); + full.close(); + searchX.close(); + searchY.close(); + for (Directory dir : dirs) + dir.close(); + super.tearDown(); + } // test the sorts by score and document number public void testBuiltInSorts() throws Exception { @@ -323,7 +340,7 @@ System.out.println("topn field1(field2)(docID):" + buff); } assertFalse("Found sort results out of order", fail); - + searcher.close(); } /** @@ -1015,8 +1032,8 @@ } public void testEmptyStringVsNullStringSort() throws Exception { - Directory dir = new MockRAMDirectory(); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig( + Directory dir = newDirectory(random); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("f", "", Field.Store.NO, Field.Index.NOT_ANALYZED)); @@ -1040,8 +1057,8 @@ } public void testLUCENE2142() throws IOException { - MockRAMDirectory indexStore = new MockRAMDirectory (); - IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig( + MockRAMDirectory indexStore = newDirectory (random); + IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i=0; i<5; i++) { Document doc = new Document(); @@ -1055,7 +1072,10 @@ new SortField("string", SortField.STRING), SortField.FIELD_DOC ); // this should not throw AIOOBE or RuntimeEx - new IndexSearcher (indexStore, true).search(new MatchAllDocsQuery(), null, 500, sort); + IndexSearcher searcher = new IndexSearcher(indexStore, true); + searcher.search(new MatchAllDocsQuery(), null, 500, sort); + searcher.close(); + indexStore.close(); } } Index: lucene/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java (working 
copy) @@ -55,7 +55,7 @@ "B 2 4 5 6", "Y 3 5 6", null, "C 3 6", "X 4 5 6" }; - small = new MockRAMDirectory(); + small = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, small, new MockAnalyzer(MockTokenizer.WHITESPACE, false)); for (int i = 0; i < data.length; i++) { @@ -609,7 +609,7 @@ public void testFarsi() throws Exception { /* build an index */ - MockRAMDirectory farsiIndex = new MockRAMDirectory(); + MockRAMDirectory farsiIndex = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, farsiIndex, new MockAnalyzer(MockTokenizer.SIMPLE, true)); Document doc = new Document(); doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, @@ -649,7 +649,7 @@ public void testDanish() throws Exception { /* build an index */ - MockRAMDirectory danishIndex = new MockRAMDirectory(); + MockRAMDirectory danishIndex = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, danishIndex, new MockAnalyzer(MockTokenizer.SIMPLE, true)); // Danish collation orders the words below in the given order Index: lucene/src/test/org/apache/lucene/search/TestWildcardRandom.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestWildcardRandom.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestWildcardRandom.java (working copy) @@ -47,7 +47,7 @@ protected void setUp() throws Exception { super.setUp(); random = newRandom(); - dir = new MockRAMDirectory(); + dir = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, dir); Document doc = new Document(); Index: lucene/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java (working copy) @@ -28,7 +28,6 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Payload; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; @@ -42,7 +41,6 @@ import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.English; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.search.Explanation.IDFExplanation; @@ -106,8 +104,8 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); Random random = newRandom(); + directory = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new PayloadAnalyzer()) .setSimilarity(similarity)); Index: lucene/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java (working copy) @@ -112,8 +112,8 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); Random random = newRandom(); + directory = 
newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new PayloadAnalyzer()) .setSimilarity(similarity)); Index: lucene/src/test/org/apache/lucene/search/payloads/PayloadHelper.java =================================================================== --- lucene/src/test/org/apache/lucene/search/payloads/PayloadHelper.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/payloads/PayloadHelper.java (working copy) @@ -107,6 +107,7 @@ * @return An IndexSearcher * @throws IOException */ + // TODO: randomize public IndexSearcher setUp(Similarity similarity, int numDocs) throws IOException { MockRAMDirectory directory = new MockRAMDirectory(); PayloadAnalyzer analyzer = new PayloadAnalyzer(); Index: lucene/src/test/org/apache/lucene/search/TestBooleanScorer.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestBooleanScorer.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestBooleanScorer.java (working copy) @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Random; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -39,11 +40,12 @@ private static final String FIELD = "category"; public void testMethod() throws Exception { - MockRAMDirectory directory = new MockRAMDirectory(); + Random random = newRandom(); + MockRAMDirectory directory = newDirectory(random); String[] values = new String[] { "1", "2", "3", "4" }; - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory); + RandomIndexWriter writer = new RandomIndexWriter(random, directory); for (int i = 0; i < values.length; i++) { Document doc = new Document(); doc.add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.NOT_ANALYZED)); Index: lucene/src/test/org/apache/lucene/search/TestDocIdSet.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestDocIdSet.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestDocIdSet.java (working copy) @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; +import java.util.Random; import junit.framework.Assert; @@ -101,8 +102,9 @@ public void testNullDocIdSet() throws Exception { // Tests that if a Filter produces a null DocIdSet, which is given to // IndexSearcher, everything works fine. This came up in LUCENE-1754. - Directory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + Directory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); Document doc = new Document(); doc.add(new Field("c", "val", Store.NO, Index.NOT_ANALYZED_NO_NORMS)); writer.addDocument(doc); Index: lucene/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java =================================================================== --- lucene/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java (working copy) @@ -17,6 +17,8 @@ * limitations under the License. 
*/ +import java.util.Random; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -31,7 +33,6 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.Scorer; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCase; public class TestNearSpansOrdered extends LuceneTestCase { @@ -54,8 +55,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(newRandom(), directory); + Random random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter writer= new RandomIndexWriter(random, directory); for (int i = 0; i < docFields.length; i++) { Document doc = new Document(); doc.add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.ANALYZED)); Index: lucene/src/test/org/apache/lucene/search/spans/TestBasics.java =================================================================== --- lucene/src/test/org/apache/lucene/search/spans/TestBasics.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/spans/TestBasics.java (working copy) @@ -18,6 +18,7 @@ */ import java.io.IOException; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockTokenizer; @@ -60,8 +61,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory, + Random random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.SIMPLE, true)); //writer.infoStream = System.out; for (int i = 0; i < 1000; i++) { Index: lucene/src/test/org/apache/lucene/search/spans/TestSpans.java =================================================================== --- lucene/src/test/org/apache/lucene/search/spans/TestSpans.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/spans/TestSpans.java (working copy) @@ -27,7 +27,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.Searcher; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexReader; @@ -39,19 +38,22 @@ import org.apache.lucene.document.Field; import org.apache.lucene.util.LuceneTestCase; import java.io.IOException; +import java.util.Random; public class TestSpans extends LuceneTestCase { private IndexSearcher searcher; private IndexReader reader; private Directory directory; - + private Random random; + public static final String field = "field"; @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(newRandom(), directory); + random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter writer= new RandomIndexWriter(random, directory); for (int i = 0; i < docFields.length; i++) { Document doc = new Document(); doc.add(new Field(field, docFields[i], Field.Store.YES, Field.Index.ANALYZED)); @@ -461,7 +463,7 @@ // LUCENE-1404 public void testNPESpanQuery() throws Throwable { - final Directory dir = new MockRAMDirectory(); + final Directory dir = newDirectory(random); final IndexWriter writer = new 
IndexWriter(dir, new IndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())); Index: lucene/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/spans/TestFieldMaskingSpanQuery.java (working copy) @@ -18,6 +18,7 @@ */ import java.util.HashSet; +import java.util.Random; import java.util.Set; import org.apache.lucene.document.Document; @@ -31,7 +32,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryUtils; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCase; public class TestFieldMaskingSpanQuery extends LuceneTestCase { @@ -55,8 +55,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(newRandom(), directory); + Random random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter writer= new RandomIndexWriter(random, directory); writer.addDocument(doc(new Field[] { field("id", "0") , Index: lucene/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java =================================================================== --- lucene/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java (working copy) @@ -59,7 +59,7 @@ super.setUp(); random = newRandom(); // create test index - mDirectory = new MockRAMDirectory(); + mDirectory = newDirectory(random); final RandomIndexWriter writer = new RandomIndexWriter(random, mDirectory, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)); Index: lucene/src/test/org/apache/lucene/search/spans/TestPayloadSpans.java =================================================================== --- lucene/src/test/org/apache/lucene/search/spans/TestPayloadSpans.java (revision 984763) +++ lucene/src/test/org/apache/lucene/search/spans/TestPayloadSpans.java (working copy) @@ -46,6 +46,7 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.payloads.PayloadHelper; import org.apache.lucene.search.payloads.PayloadSpanUtil; +import org.apache.lucene.store.Directory; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCase; @@ -56,7 +57,8 @@ protected IndexReader indexReader; private IndexReader closeIndexReader; private Random rand; - + private Directory directory; + public TestPayloadSpans(String s) { super(s); } @@ -115,7 +117,7 @@ - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, directory, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new PayloadAnalyzer()).setSimilarity(similarity)); @@ -128,6 +130,7 @@ checkSpans(snq.getSpans(SlowMultiReaderWrapper.wrap(reader)), 1,new int[]{2}); reader.close(); + directory.close(); } public void testNestedSpans() throws Exception { @@ -184,6 +187,7 @@ assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 2, new int[]{3,3}); closeIndexReader.close(); + directory.close(); } public void testFirstClauseWithoutPayload() throws Exception { @@ -215,6 +219,7 @@ assertTrue("spans is null 
and it shouldn't be", spans != null); checkSpans(spans, 1, new int[]{3}); closeIndexReader.close(); + directory.close(); } public void testHeavilyNestedSpanQuery() throws Exception { @@ -251,11 +256,12 @@ assertTrue("spans is null and it shouldn't be", spans != null); checkSpans(spans, 2, new int[]{8, 8}); closeIndexReader.close(); + directory.close(); } public void testShrinkToAfterShortestMatch() throws CorruptIndexException, LockObtainFailedException, IOException { - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, directory, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new TestPayloadAnalyzer())); @@ -288,11 +294,12 @@ assertTrue(payloadSet.contains("a:Noise:10")); assertTrue(payloadSet.contains("k:Noise:11")); reader.close(); + directory.close(); } public void testShrinkToAfterShortestMatch2() throws CorruptIndexException, LockObtainFailedException, IOException { - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, directory, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new TestPayloadAnalyzer())); @@ -323,11 +330,12 @@ assertTrue(payloadSet.contains("a:Noise:10")); assertTrue(payloadSet.contains("k:Noise:11")); reader.close(); + directory.close(); } public void testShrinkToAfterShortestMatch3() throws CorruptIndexException, LockObtainFailedException, IOException { - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, directory, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new TestPayloadAnalyzer())); @@ -364,10 +372,11 @@ assertTrue(payloadSet.contains("a:Noise:10")); assertTrue(payloadSet.contains("k:Noise:11")); reader.close(); + directory.close(); } public void testPayloadSpanUtil() throws Exception { - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(rand); RandomIndexWriter writer = new RandomIndexWriter(rand, directory, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new PayloadAnalyzer()).setSimilarity(similarity)); @@ -389,6 +398,7 @@ System.out.println(new String(bytes)); } reader.close(); + directory.close(); } private void checkSpans(Spans spans, int expectedNumSpans, int expectedNumPayloads, @@ -425,7 +435,7 @@ } private IndexSearcher getSearcher() throws Exception { - MockRAMDirectory directory = new MockRAMDirectory(); + directory = newDirectory(rand); String[] docs = new String[]{"xx rr yy mm pp","xx yy mm rr pp", "nopayload qq ss pp np", "one two three four five six seven eight nine ten eleven", "nine one two three four five six seven eight eleven ten"}; RandomIndexWriter writer = new RandomIndexWriter(rand, directory, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new PayloadAnalyzer()).setSimilarity(similarity)); Index: lucene/src/test/org/apache/lucene/search/TestMultiSearcher.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestMultiSearcher.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestMultiSearcher.java (working copy) @@ -30,11 +30,11 @@ import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.store.Directory; import org.apache.lucene.store.MockRAMDirectory; -import org.apache.lucene.store.MockRAMDirectory; import java.io.IOException; import java.util.Collections; import 
java.util.HashSet; import java.util.Map; +import java.util.Random; import java.util.Set; /** @@ -42,6 +42,14 @@ */ public class TestMultiSearcher extends LuceneTestCase { + private Random random; + + @Override + public void setUp() throws Exception { + super.setUp(); + random = newRandom(); + } + public TestMultiSearcher(String name) { super(name); @@ -57,8 +65,8 @@ public void testEmptyIndex() throws Exception { // creating two directories for indices - Directory indexStoreA = new MockRAMDirectory(); - Directory indexStoreB = new MockRAMDirectory(); + Directory indexStoreA = newDirectory(random); + Directory indexStoreB = newDirectory(random); // creating a document to store Document lDoc = new Document(); @@ -81,9 +89,9 @@ lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED)); // creating an index writer for the first index - IndexWriter writerA = new IndexWriter(indexStoreA, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); // creating an index writer for the second index, but writing nothing - IndexWriter writerB = new IndexWriter(indexStoreB, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); //-------------------------------------------------------------------- // scenario 1 @@ -127,7 +135,7 @@ //-------------------------------------------------------------------- // adding one document to the empty index - writerB = new IndexWriter(indexStoreB, new IndexWriterConfig( + writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setOpenMode(OpenMode.APPEND)); @@ -216,11 +224,11 @@ return document; } - private static void initIndex(Directory directory, int nDocs, boolean create, String contents2) throws IOException { + private static void initIndex(Random random, Directory directory, int nDocs, boolean create, String contents2) throws IOException { IndexWriter indexWriter=null; try { - indexWriter = new IndexWriter(directory, new IndexWriterConfig( + indexWriter = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode( create ? OpenMode.CREATE : OpenMode.APPEND)); @@ -238,13 +246,13 @@ MockRAMDirectory ramDirectory1, ramDirectory2; IndexSearcher indexSearcher1, indexSearcher2; - ramDirectory1 = new MockRAMDirectory(); - ramDirectory2 = new MockRAMDirectory(); + ramDirectory1 = newDirectory(random); + ramDirectory2 = newDirectory(random); Query query = new TermQuery(new Term("contents", "doc0")); // Now put the documents in a different index - initIndex(ramDirectory1, 10, true, null); // documents with a single token "doc0", "doc1", etc... - initIndex(ramDirectory2, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... + initIndex(random, ramDirectory1, 10, true, null); // documents with a single token "doc0", "doc1", etc... + initIndex(random, ramDirectory2, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... 
indexSearcher1 = new IndexSearcher(ramDirectory1, true); indexSearcher2 = new IndexSearcher(ramDirectory2, true); @@ -277,6 +285,10 @@ assertTrue("value is null and it shouldn't be", value != null); value = document.get("other"); assertTrue("value is not null and it should be", value == null); + indexSearcher1.close(); + indexSearcher2.close(); + ramDirectory1.close(); + ramDirectory2.close(); } /* uncomment this when the highest score is always normalized to 1.0, even when it was < 1.0 @@ -296,11 +308,11 @@ IndexSearcher indexSearcher1; ScoreDoc[] hits; - ramDirectory1=new MockRAMDirectory(); + ramDirectory1=newDirectory(random); // First put the documents in the same index - initIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc... - initIndex(ramDirectory1, nDocs, false, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... + initIndex(random, ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc... + initIndex(random, ramDirectory1, nDocs, false, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... indexSearcher1=new IndexSearcher(ramDirectory1, true); indexSearcher1.setDefaultFieldSortScoring(true, true); @@ -323,12 +335,12 @@ MockRAMDirectory ramDirectory2; IndexSearcher indexSearcher2; - ramDirectory1=new MockRAMDirectory(); - ramDirectory2=new MockRAMDirectory(); + ramDirectory1=newDirectory(random); + ramDirectory2=newDirectory(random); // Now put the documents in a different index - initIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc... - initIndex(ramDirectory2, nDocs, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... + initIndex(random, ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc... + initIndex(random, ramDirectory2, nDocs, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... indexSearcher1=new IndexSearcher(ramDirectory1, true); indexSearcher1.setDefaultFieldSortScoring(true, true); @@ -366,8 +378,8 @@ * @throws IOException */ public void testCustomSimilarity () throws IOException { - MockRAMDirectory dir = new MockRAMDirectory(); - initIndex(dir, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... + MockRAMDirectory dir = newDirectory(random); + initIndex(random, dir, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... IndexSearcher srchr = new IndexSearcher(dir, true); MultiSearcher msrchr = getMultiSearcherInstance(new Searcher[]{srchr}); @@ -403,30 +415,38 @@ // The scores from the IndexSearcher and Multisearcher should be the same // if the same similarity is used. assertEquals("MultiSearcher score must be equal to single searcher score!", score1, scoreN, 1e-6); + msrchr.close(); + srchr.close(); + dir.close(); } public void testDocFreq() throws IOException{ - MockRAMDirectory dir1 = new MockRAMDirectory(); - MockRAMDirectory dir2 = new MockRAMDirectory(); + MockRAMDirectory dir1 = newDirectory(random); + MockRAMDirectory dir2 = newDirectory(random); - initIndex(dir1, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... - initIndex(dir2, 5, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... + initIndex(random, dir1, 10, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... + initIndex(random, dir2, 5, true, "x"); // documents with two tokens "doc0" and "x", "doc1" and x, etc... 
IndexSearcher searcher1 = new IndexSearcher(dir1, true); IndexSearcher searcher2 = new IndexSearcher(dir2, true); MultiSearcher multiSearcher = getMultiSearcherInstance(new Searcher[]{searcher1, searcher2}); assertEquals(15, multiSearcher.docFreq(new Term("contents","x"))); + multiSearcher.close(); + searcher1.close(); + searcher2.close(); + dir1.close(); + dir2.close(); } public void testCreateDocFrequencyMap() throws IOException{ - MockRAMDirectory dir1 = new MockRAMDirectory(); - MockRAMDirectory dir2 = new MockRAMDirectory(); + MockRAMDirectory dir1 = newDirectory(random); + MockRAMDirectory dir2 = newDirectory(random); Term template = new Term("contents") ; String[] contents = {"a", "b", "c"}; HashSet termsSet = new HashSet(); for (int i = 0; i < contents.length; i++) { - initIndex(dir1, i+10, i==0, contents[i]); - initIndex(dir2, i+5, i==0, contents[i]); + initIndex(random, dir1, i+10, i==0, contents[i]); + initIndex(random, dir2, i+5, i==0, contents[i]); termsSet.add(template.createTerm(contents[i])); } IndexSearcher searcher1 = new IndexSearcher(dir1, true); @@ -437,5 +457,10 @@ for (int i = 0; i < contents.length; i++) { assertEquals(Integer.valueOf((i*2) +15), docFrequencyMap.get(template.createTerm(contents[i]))); } + multiSearcher.close(); + searcher1.close(); + searcher2.close(); + dir1.close(); + dir2.close(); } } Index: lucene/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (working copy) @@ -17,6 +17,8 @@ * limitations under the License. */ +import java.util.Random; + import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Index; @@ -26,14 +28,14 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCase; public class TestQueryWrapperFilter extends LuceneTestCase { public void testBasic() throws Exception { - Directory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + Directory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); Document doc = new Document(); doc.add(new Field("field", "value", Store.NO, Index.ANALYZED)); writer.addDocument(doc); Index: lucene/src/test/org/apache/lucene/search/TestTermVectors.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestTermVectors.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestTermVectors.java (working copy) @@ -26,7 +26,6 @@ import org.apache.lucene.index.*; import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.English; import java.io.IOException; @@ -38,7 +37,7 @@ public class TestTermVectors extends LuceneTestCase { private IndexSearcher searcher; private IndexReader reader; - private Directory directory = new MockRAMDirectory(); + private Directory directory; private Random random; @@ -50,6 +49,7 @@ protected void setUp() throws Exception { super.setUp(); random = newRandom(); + directory = newDirectory(random); 
RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.SIMPLE, true)); //writer.setUseCompoundFile(true); //writer.infoStream = System.out; @@ -115,7 +115,7 @@ } public void testTermVectorsFieldOrder() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, dir, new MockAnalyzer(MockTokenizer.SIMPLE, true)); Document doc = new Document(); doc.add(new Field("c", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); @@ -243,7 +243,7 @@ Document testDoc4 = new Document(); setupDoc(testDoc4, test4); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true)) Index: lucene/src/test/org/apache/lucene/search/TestSimpleExplanations.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestSimpleExplanations.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestSimpleExplanations.java (working copy) @@ -17,6 +17,8 @@ * limitations under the License. */ +import java.util.Random; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -307,8 +309,8 @@ public void testTermQueryMultiSearcherExplain() throws Exception { // creating two directories for indices - Directory indexStoreA = new MockRAMDirectory(); - Directory indexStoreB = new MockRAMDirectory(); + Directory indexStoreA = newDirectory(random); + Directory indexStoreB = newDirectory(random); Document lDoc = new Document(); lDoc.add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED)); @@ -317,9 +319,9 @@ Document lDoc3 = new Document(); lDoc3.add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED)); - IndexWriter writerA = new IndexWriter(indexStoreA, new IndexWriterConfig( + IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); - IndexWriter writerB = new IndexWriter(indexStoreB, new IndexWriterConfig( + IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writerA.addDocument(lDoc); @@ -368,6 +370,8 @@ assertTrue(exp, exp.indexOf("1=3") > -1); assertTrue(exp, exp.indexOf("2=3") > -1); mSearcher.close(); + indexStoreA.close(); + indexStoreB.close(); } } Index: lucene/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java (working copy) @@ -40,7 +40,7 @@ public void testMultiValuedNRQ() throws Exception { final Random rnd = newRandom(); - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(rnd); RandomIndexWriter writer = new RandomIndexWriter(rnd, directory); DecimalFormat format = new DecimalFormat("00000000000", new DecimalFormatSymbols(Locale.US)); Index: lucene/src/test/org/apache/lucene/search/TestTopDocsCollector.java =================================================================== --- 
lucene/src/test/org/apache/lucene/search/TestTopDocsCollector.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestTopDocsCollector.java (working copy) @@ -18,12 +18,12 @@ */ import java.io.IOException; +import java.util.Random; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCase; public class TestTopDocsCollector extends LuceneTestCase { @@ -90,7 +90,7 @@ private static final float MAX_SCORE = 9.17561f; - private Directory dir = new MockRAMDirectory(); + private Directory dir; private IndexReader reader; private TopDocsCollector doSearch(int numResults) throws IOException { @@ -108,7 +108,9 @@ // populate an index with 30 documents, this should be enough for the test. // The documents have no content - the test uses MatchAllDocsQuery(). - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); for (int i = 0; i < 30; i++) { writer.addDocument(new Document()); } Index: lucene/src/test/org/apache/lucene/search/TestTermScorer.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestTermScorer.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestTermScorer.java (working copy) @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Random; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.document.Document; @@ -46,9 +47,10 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); + Random random = newRandom(); + directory = newDirectory(random); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory); + RandomIndexWriter writer = new RandomIndexWriter(random, directory); for (int i = 0; i < values.length; i++) { Document doc = new Document(); doc Index: lucene/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java (working copy) @@ -30,6 +30,7 @@ import java.io.IOException; import java.util.LinkedList; +import java.util.Random; /** * This class tests PhrasePrefixQuery class. 
@@ -43,8 +44,9 @@ * */ public void testPhrasePrefix() throws IOException { - MockRAMDirectory indexStore = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), indexStore); + Random random = newRandom(); + MockRAMDirectory indexStore = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, indexStore); Document doc1 = new Document(); Document doc2 = new Document(); Document doc3 = new Document(); Index: lucene/src/test/org/apache/lucene/search/TestSetNorm.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestSetNorm.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestSetNorm.java (working copy) @@ -18,6 +18,7 @@ */ import java.io.IOException; +import java.util.Random; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.analysis.MockAnalyzer; @@ -39,8 +40,9 @@ } public void testSetNorm() throws Exception { - MockRAMDirectory store = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + Random random = newRandom(); + MockRAMDirectory store = newDirectory(random); + IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); // add the same document four times Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED); @@ -63,7 +65,8 @@ // check that searches are ordered by this boost final float[] scores = new float[4]; - new IndexSearcher(store, true).search + IndexSearcher is = new IndexSearcher(store, true); + is.search (new TermQuery(new Term("field", "word")), new Collector() { private int base = 0; @@ -85,12 +88,13 @@ return true; } }); - + is.close(); float lastScore = 0.0f; for (int i = 0; i < 4; i++) { assertTrue(scores[i] > lastScore); lastScore = scores[i]; } + store.close(); } } Index: lucene/src/test/org/apache/lucene/search/TestWildcard.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestWildcard.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestWildcard.java (working copy) @@ -99,6 +99,8 @@ q = searcher.rewrite(wq); assertTrue(q instanceof ConstantScoreQuery); assertEquals(q.getBoost(), wq.getBoost()); + searcher.close(); + indexStore.close(); } /** @@ -114,6 +116,8 @@ Query q = searcher.rewrite(wq); assertTrue(q instanceof BooleanQuery); assertEquals(0, ((BooleanQuery) q).clauses().size()); + searcher.close(); + indexStore.close(); } /** @@ -134,6 +138,8 @@ assertMatches(searcher, wq, 2); assertFalse(wq.getTermsEnum(searcher.getIndexReader()) instanceof PrefixTermsEnum); assertFalse(wq.getTermsEnum(searcher.getIndexReader()) instanceof AutomatonTermsEnum); + searcher.close(); + indexStore.close(); } /** @@ -171,6 +177,8 @@ assertMatches(searcher, new WildcardQuery(new Term("body", "*tall")), 0); assertMatches(searcher, new WildcardQuery(new Term("body", "*tal")), 1); assertMatches(searcher, new WildcardQuery(new Term("body", "*tal*")), 2); + searcher.close(); + indexStore.close(); } /** @@ -196,11 +204,13 @@ assertMatches(searcher, query4, 3); assertMatches(searcher, query5, 0); assertMatches(searcher, query6, 1); // Query: 'meta??' 
matches 'metals' not 'metal' + searcher.close(); + indexStore.close(); } private MockRAMDirectory getIndexStore(String field, String[] contents) throws IOException { - MockRAMDirectory indexStore = new MockRAMDirectory(); + MockRAMDirectory indexStore = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, indexStore); for (int i = 0; i < contents.length; ++i) { Document doc = new Document(); @@ -256,7 +266,7 @@ }; // prepare the index - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); RandomIndexWriter iw = new RandomIndexWriter(random, dir); for (int i = 0; i < docs.length; i++) { Document doc = new Document(); @@ -312,5 +322,6 @@ } searcher.close(); + dir.close(); } } Index: lucene/src/test/org/apache/lucene/search/TestAutomatonQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestAutomatonQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestAutomatonQuery.java (working copy) @@ -43,7 +43,7 @@ public void setUp() throws Exception { super.setUp(); Random random = newRandom(); - directory = new MockRAMDirectory(); + directory = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(random, directory); Document doc = new Document(); Field titleField = new Field("title", "some title", Field.Store.NO, Index: lucene/src/test/org/apache/lucene/search/BaseTestRangeFilter.java =================================================================== --- lucene/src/test/org/apache/lucene/search/BaseTestRangeFilter.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/BaseTestRangeFilter.java (working copy) @@ -34,7 +34,7 @@ public static final boolean F = false; public static final boolean T = true; - protected Random rand; + protected Random rand = newRandom(); /** * Collation interacts badly with hyphens -- collation produces different @@ -46,20 +46,23 @@ int maxR; int minR; boolean allowNegativeRandomInts; - MockRAMDirectory index = new MockRAMDirectory(); + MockRAMDirectory index; TestIndex(int minR, int maxR, boolean allowNegativeRandomInts) { this.minR = minR; this.maxR = maxR; this.allowNegativeRandomInts = allowNegativeRandomInts; + try { + index = newDirectory(rand); + } catch (IOException e) { throw new RuntimeException(e); } } } IndexReader signedIndexReader; IndexReader unsignedIndexReader; - TestIndex signedIndexDir = new TestIndex(Integer.MAX_VALUE, Integer.MIN_VALUE, true); - TestIndex unsignedIndexDir = new TestIndex(Integer.MAX_VALUE, 0, false); + TestIndex signedIndexDir; + TestIndex unsignedIndexDir; int minId = 0; int maxId = 10000; @@ -88,7 +91,8 @@ protected void setUp() throws Exception { super.setUp(); - rand = newRandom(); + signedIndexDir = new TestIndex(Integer.MAX_VALUE, Integer.MIN_VALUE, true); + unsignedIndexDir = new TestIndex(Integer.MAX_VALUE, 0, false); signedIndexReader = build(rand, signedIndexDir); unsignedIndexReader = build(rand, unsignedIndexDir); } @@ -96,6 +100,8 @@ protected void tearDown() throws Exception { signedIndexReader.close(); unsignedIndexReader.close(); + signedIndexDir.index.close(); + unsignedIndexDir.index.close(); super.tearDown(); } Index: lucene/src/test/org/apache/lucene/search/TestBoolean2.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestBoolean2.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestBoolean2.java (working copy) @@ -46,6 +46,7 @@ private 
static int NUM_EXTRA_DOCS = 6000; public static final String field = "field"; + private Directory directory; private Directory dir2; private int mulFactor; @@ -53,7 +54,7 @@ protected void setUp() throws Exception { super.setUp(); rnd = newRandom(); - MockRAMDirectory directory = new MockRAMDirectory(); + directory = newDirectory(rnd); RandomIndexWriter writer= new RandomIndexWriter(rnd, directory); for (int i = 0; i < docFields.length; i++) { Document doc = new Document(); @@ -96,8 +97,10 @@ @Override protected void tearDown() throws Exception { + searcher.close(); reader.close(); dir2.close(); + directory.close(); super.tearDown(); } Index: lucene/src/test/org/apache/lucene/search/TestExplanations.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestExplanations.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestExplanations.java (working copy) @@ -17,6 +17,8 @@ * limitations under the License. */ +import java.util.Random; + import org.apache.lucene.queryParser.QueryParser; import org.apache.lucene.queryParser.ParseException; import org.apache.lucene.analysis.MockAnalyzer; @@ -51,6 +53,7 @@ protected IndexSearcher searcher; protected IndexReader reader; protected Directory directory; + protected Random random; public static final String KEY = "KEY"; public static final String FIELD = "field"; @@ -68,8 +71,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(newRandom(), directory); + random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter writer= new RandomIndexWriter(random, directory); for (int i = 0; i < docFields.length; i++) { Document doc = new Document(); doc.add(new Field(KEY, ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED)); Index: lucene/src/test/org/apache/lucene/search/function/TestOrdValues.java =================================================================== --- lucene/src/test/org/apache/lucene/search/function/TestOrdValues.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/function/TestOrdValues.java (working copy) @@ -87,6 +87,7 @@ } prevID = resID; } + s.close(); } /** @@ -131,6 +132,7 @@ : id2String(i + 1); // reverse ==> smaller values first assertTrue("id of result " + i + " shuould be " + expectedId + " != " + score, expectedId.equals(id)); } + s.close(); } /** @@ -217,7 +219,7 @@ } } } - + s.close(); // verify new values are reloaded (not reused) for a new reader s = new IndexSearcher(dir, true); if (inOrder) { @@ -245,6 +247,7 @@ } } } + s.close(); } private String testName() { Index: lucene/src/test/org/apache/lucene/search/function/TestFieldScoreQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/function/TestFieldScoreQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/function/TestFieldScoreQuery.java (working copy) @@ -91,6 +91,7 @@ assertTrue("res id "+resID+" should be < prev res id "+prevID, resID.compareTo(prevID)<0); prevID = resID; } + s.close(); } /** Test that FieldScoreQuery of Type.BYTE returns the expected scores. 
*/ @@ -136,6 +137,7 @@ float expectedScore = expectedFieldScore(id); // "ID7" --> 7.0 assertEquals("score of " + id + " shuould be " + expectedScore + " != " + score, expectedScore, score, TEST_SCORE_TOLERANCE_DELTA); } + s.close(); } /** Test that FieldScoreQuery of Type.BYTE caches/reuses loaded values and consumes the proper RAM resources. */ @@ -210,7 +212,7 @@ } } } - + s.close(); // verify new values are reloaded (not reused) for a new reader s = new IndexSearcher(dir, true); FieldScoreQuery q = new FieldScoreQuery(field,tp); @@ -233,6 +235,7 @@ } } } + s.close(); } private String testName() { Index: lucene/src/test/org/apache/lucene/search/function/FunctionTestSetup.java =================================================================== --- lucene/src/test/org/apache/lucene/search/function/FunctionTestSetup.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/function/FunctionTestSetup.java (working copy) @@ -25,7 +25,6 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCaseJ4; import org.apache.lucene.util._TestUtil; import java.util.Random; @@ -85,9 +84,10 @@ @Override @After public void tearDown() throws Exception { - super.tearDown(); + dir.close(); dir = null; anlzr = null; + super.tearDown(); } @Override @@ -95,9 +95,9 @@ public void setUp() throws Exception { super.setUp(); // prepare a small index with just a few documents. - dir = new MockRAMDirectory(); + Random r = newRandom(); + dir = newDirectory(r); anlzr = new MockAnalyzer(); - Random r = newRandom(); IndexWriterConfig iwc = newIndexWriterConfig(r, TEST_VERSION_CURRENT, anlzr); if (doMultiSegment) { iwc.setMaxBufferedDocs(_TestUtil.nextInt(r, 2, 7)); Index: lucene/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java =================================================================== --- lucene/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java (working copy) @@ -281,6 +281,7 @@ verifyResults(boost, s, h1, h2CustomNeutral, h3CustomMul, h4CustomAdd, h5CustomMulAdd, q1, q2CustomNeutral, q3CustomMul, q4CustomAdd, q5CustomMulAdd); + s.close(); } // verify results are as expected. 
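The hunks above and below all apply the same mechanical conversion, so a minimal sketch of the target shape may help while reviewing them: a test obtains its per-test Random from newRandom(), builds its directory with newDirectory(random) and its writer config with newIndexWriterConfig(random, ...), and explicitly closes every searcher, reader and directory (typically in tearDown) so the mock directory's leak checks can fire. This sketch is illustrative only and is not part of the patch; the class name, field contents and query are invented, while the LuceneTestCase helpers (newRandom, newDirectory, newIndexWriterConfig, TEST_VERSION_CURRENT) are the ones the hunks themselves introduce.

import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

// Hypothetical example of the conversion pattern; not a file touched by this patch.
public class TestRandomizedDirectoryPattern extends LuceneTestCase {
  private Random random;
  private Directory dir;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    random = newRandom();        // per-test seed, reported on failure for reproducibility
    dir = newDirectory(random);  // replaces "new MockRAMDirectory()"
  }

  @Override
  protected void tearDown() throws Exception {
    dir.close();                 // closing lets the mock directory flag leaked files/readers
    super.tearDown();
  }

  public void testIndexAndSearch() throws Exception {
    IndexWriter w = new IndexWriter(dir,
        newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()));
    Document doc = new Document();
    doc.add(new Field("f", "a b c", Field.Store.NO, Field.Index.ANALYZED));
    w.addDocument(doc);
    w.close();

    IndexSearcher searcher = new IndexSearcher(dir, true);
    assertEquals(1, searcher.search(new TermQuery(new Term("f", "a")), 10).totalHits);
    searcher.close();            // close searchers/readers before closing the directory
  }
}

As the surrounding hunks suggest, routing directory and IndexWriterConfig creation through the random-seeded helpers lets the test infrastructure vary the underlying settings per run while keeping failures reproducible from the printed seed, and the added close() calls are what make the end-of-test resource checks meaningful.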
Index: lucene/src/test/org/apache/lucene/search/function/TestValueSource.java =================================================================== --- lucene/src/test/org/apache/lucene/search/function/TestValueSource.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/function/TestValueSource.java (working copy) @@ -28,7 +28,7 @@ public class TestValueSource extends LuceneTestCase { public void testMultiValueSource() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(newRandom()); IndexWriter w = new IndexWriter(dir, new MockAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED); Document doc = new Document(); Field f = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED); Index: lucene/src/test/org/apache/lucene/search/TestThreadSafe.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestThreadSafe.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestThreadSafe.java (working copy) @@ -18,7 +18,6 @@ import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -33,10 +32,8 @@ public class TestThreadSafe extends LuceneTestCase { Random r; Directory dir1; - Directory dir2; IndexReader ir1; - IndexReader ir2; String failure=null; @@ -142,7 +139,7 @@ public void testLazyLoadThreadSafety() throws Exception{ r = newRandom(); - dir1 = new MockRAMDirectory(); + dir1 = newDirectory(r); // test w/ field sizes bigger than the buffer of an index input buildDir(dir1, 15, 5, 2000); @@ -151,7 +148,9 @@ for (int i = 0; i < num; i++) { ir1 = IndexReader.open(dir1, false); doTest(10,100); + ir1.close(); } + dir1.close(); } } Index: lucene/src/test/org/apache/lucene/search/TestFieldCache.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestFieldCache.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestFieldCache.java (working copy) @@ -25,7 +25,6 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.MockRAMDirectory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util._TestUtil; @@ -39,7 +38,9 @@ protected IndexReader reader; private static final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER; private String[] unicodeStrings; - + private Random random; + private Directory directory; + public TestFieldCache(String s) { super(s); } @@ -47,9 +48,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - Random r = newRandom(); - MockRAMDirectory directory = new MockRAMDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(r, directory); + random = newRandom(); + directory = newDirectory(random); + RandomIndexWriter writer= new RandomIndexWriter(random, directory); long theLong = Long.MAX_VALUE; double theDouble = Double.MAX_VALUE; byte theByte = Byte.MAX_VALUE; @@ -67,18 +68,18 @@ doc.add(new Field("theFloat", String.valueOf(theFloat--), Field.Store.NO, Field.Index.NOT_ANALYZED)); // sometimes skip the field: - if (r.nextInt(40) != 17) { + if (random.nextInt(40) != 17) { String s = null; - if (i > 0 && r.nextInt(3) == 1) { + if (i > 0 && random.nextInt(3) == 1) { 
// reuse past string -- try to find one that's not null for(int iter=0;iter<10 && s==null;iter++) { - s = unicodeStrings[r.nextInt(i)]; + s = unicodeStrings[random.nextInt(i)]; } if (s == null) { - s = _TestUtil.randomUnicodeString(r, 250); + s = _TestUtil.randomUnicodeString(random, 250); } } else { - s = _TestUtil.randomUnicodeString(r, 250); + s = _TestUtil.randomUnicodeString(random, 250); } unicodeStrings[i] = s; doc.add(new Field("theRandomUnicodeString", unicodeStrings[i], Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS)); @@ -89,6 +90,13 @@ writer.close(); } + @Override + protected void tearDown() throws Exception { + reader.close(); + directory.close(); + super.tearDown(); + } + public void testInfoStream() throws Exception { try { FieldCache cache = FieldCache.DEFAULT; @@ -202,8 +210,8 @@ } public void testEmptyIndex() throws Exception { - Directory dir = new MockRAMDirectory(); - IndexWriter writer= new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(500)); + Directory dir = newDirectory(random); + IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(500)); IndexReader r = writer.getReader(); FieldCache.DocTerms terms = FieldCache.DEFAULT.getTerms(r, "foobar"); FieldCache.DocTermsIndex termsIndex = FieldCache.DEFAULT.getTermsIndex(r, "foobar"); Index: lucene/src/test/org/apache/lucene/search/TestScorerPerf.java =================================================================== --- lucene/src/test/org/apache/lucene/search/TestScorerPerf.java (revision 984759) +++ lucene/src/test/org/apache/lucene/search/TestScorerPerf.java (working copy) @@ -36,21 +36,22 @@ */ public class TestScorerPerf extends LuceneTestCase { - Random r; + Random r = newRandom(); boolean validate = true; // set to false when doing performance testing BitSet[] sets; Term[] terms; IndexSearcher s; + Directory d; public void createDummySearcher() throws Exception { // Create a dummy index with nothing in it. // This could possibly fail if Lucene starts checking for docid ranges... 
- MockRAMDirectory rd = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + d = newDirectory(r); + IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(r, TEST_VERSION_CURRENT, new MockAnalyzer())); iw.addDocument(new Document()); iw.close(); - s = new IndexSearcher(rd, true); + s = new IndexSearcher(d, true); } public void createRandomTerms(int nDocs, int nTerms, double power, Directory dir) throws Exception { @@ -62,7 +63,7 @@ terms[i] = new Term("f",Character.toString((char)('A'+i))); } - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(r, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE)); for (int i=0; i commitUserData = new HashMap(); commitUserData.put("foo", "fighters"); @@ -138,7 +137,7 @@ } public void testIsCurrent() throws Exception { - MockRAMDirectory d = new MockRAMDirectory(); + MockRAMDirectory d = newDirectory(random); IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); @@ -167,7 +166,7 @@ * @throws Exception on error */ public void testGetFieldNames() throws Exception { - MockRAMDirectory d = new MockRAMDirectory(); + MockRAMDirectory d = newDirectory(random); // set up writer IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); @@ -259,7 +258,7 @@ } public void testTermVectors() throws Exception { - MockRAMDirectory d = new MockRAMDirectory(); + MockRAMDirectory d = newDirectory(random); // set up writer IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); @@ -289,7 +288,8 @@ assertTrue("entry is null and it shouldn't be", entry != null); if (VERBOSE) System.out.println("Entry: " + entry); } - + reader.close(); + d.close(); } private void assertTermDocsCount(String msg, @@ -312,7 +312,7 @@ } public void testBasicDelete() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = null; IndexReader reader = null; @@ -356,7 +356,7 @@ } public void testBinaryFields() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); @@ -421,12 +421,13 @@ assertEquals(bin[i], data1[i + b1.getBinaryOffset()]); } reader.close(); + dir.close(); } // Make sure attempts to make changes after reader is // closed throws IOException: public void testChangesAfterClose() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = null; IndexReader reader = null; @@ -465,11 +466,12 @@ } catch (AlreadyClosedException e) { // expected } + dir.close(); } // Make sure we get lock obtain failed exception with 2 writers: public void testLockObtainFailed() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); Term searchTerm = new Term("content", "aaa"); @@ -506,6 +508,7 @@ } writer.close(); reader.close(); + dir.close(); } // Make sure you can set norms & commit even if a reader @@ -556,7 +559,7 @@ // Make sure you can set norms & commit, and there are // no 
extra norms files left: public void testWritingNormsNoReader() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = null; IndexReader reader = null; Term searchTerm = new Term("content", "aaa"); @@ -728,7 +731,7 @@ try { final Directory dir; if (0 == i) - dir = new MockRAMDirectory(); + dir = newDirectory(random); else dir = getDirectory(); assertFalse(IndexReader.indexExists(dir)); @@ -764,7 +767,7 @@ } public void testVersion() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); assertFalse(IndexReader.indexExists(dir)); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); @@ -787,7 +790,7 @@ } public void testLock() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); writer.close(); @@ -807,7 +810,7 @@ } public void testUndeleteAll() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); addDocumentWithFields(writer); @@ -824,7 +827,7 @@ } public void testUndeleteAllAfterClose() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); addDocumentWithFields(writer); @@ -841,7 +844,7 @@ } public void testUndeleteAllAfterCloseThenReopen() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); addDocumentWithFields(writer); @@ -878,7 +881,7 @@ int END_COUNT = 144; // First build up a starting index: - MockRAMDirectory startDir = new MockRAMDirectory(); + MockRAMDirectory startDir = newDirectory(random); IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for(int i=0;i<157;i++) { Document d = new Document(); @@ -1052,7 +1055,7 @@ } public void testDocsOutOfOrderJIRA140() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for(int i=0;i<11;i++) { addDoc(writer, "aaa"); @@ -1092,7 +1095,7 @@ public void testExceptionReleaseWriteLockJIRA768() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDoc(writer, "aaa"); writer.close(); @@ -1381,7 +1384,7 @@ public void testGetIndexCommit() throws IOException { - MockRAMDirectory d = new MockRAMDirectory(); + MockRAMDirectory d = newDirectory(random); // set up writer IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random, @@ -1430,7 +1433,7 @@ } public void testReadOnly() throws Throwable { - MockRAMDirectory d = new MockRAMDirectory(); + MockRAMDirectory d = 
newDirectory(random); IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocumentWithFields(writer); @@ -1493,12 +1496,13 @@ writer.close(); r3.close(); + d.close(); } // LUCENE-1474 public void testIndexReader() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.addDocument(createDocument("a")); @@ -1511,11 +1515,12 @@ reader.deleteDocuments(new Term("id", "b")); reader.close(); IndexReader.open(dir,true).close(); + dir.close(); } // LUCENE-1647 public void testIndexReaderUnDeleteAll() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); dir.setPreventDoubleWrite(false); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); @@ -1557,7 +1562,7 @@ // LUCENE-1509 public void testNoDupCommitFileNames() throws Throwable { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) @@ -1583,7 +1588,7 @@ // LUCENE-1579: Ensure that on a cloned reader, segments // reuse the doc values arrays in FieldCache public void testFieldCacheReuseAfterClone() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED)); @@ -1614,7 +1619,7 @@ // shared segments reuse the doc values arrays in // FieldCache public void testFieldCacheReuseAfterReopen() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED)); @@ -1647,7 +1652,7 @@ // LUCENE-1579: Make sure all SegmentReaders are new when // reopen switches readOnly public void testReopenChangeReadonly() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(-1)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10); Document doc = new Document(); @@ -1689,7 +1694,7 @@ // LUCENE-1586: getUniqueTermCount public void testUniqueTermCount() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard"))); Document doc = new Document(); doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED)); @@ -1722,7 +1727,7 @@ // LUCENE-1609: don't load terms index public void testNoTermsIndex() throws Throwable { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new 
MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard"))); Document doc = new Document(); doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED)); @@ -1758,11 +1763,13 @@ // expected } } + r2.close(); + dir.close(); } // LUCENE-2046 public void testPrepareCommitIsCurrent() throws Throwable { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.commit(); Index: lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestStressIndexing2.java (working copy) @@ -64,7 +64,7 @@ public void testRandomIWReader() throws Throwable { r = newRandom(); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(r); // TODO: verify equals using IW.getReader DocsAndWriter dw = indexRandomIWReader(5, 3, 100, dir); @@ -78,9 +78,9 @@ public void testRandom() throws Throwable { r = newRandom(); - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(r); // dir1 = FSDirectory.open("foofoofoo"); - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(r); // mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1); int maxThreadStates = 1+r.nextInt(10); boolean doReaderPooling = r.nextBoolean(); @@ -92,6 +92,8 @@ // verifyEquals(dir2, dir2, "id"); verifyEquals(dir1, dir2, "id"); + dir1.close(); + dir2.close(); } public void testMultiConfig() throws Throwable { @@ -111,13 +113,15 @@ int nThreads=r.nextInt(5)+1; int iter=r.nextInt(5)+1; int range=r.nextInt(20)+1; - Directory dir1 = new MockRAMDirectory(); - Directory dir2 = new MockRAMDirectory(); + Directory dir1 = newDirectory(r); + Directory dir2 = newDirectory(r); Map docs = indexRandom(nThreads, iter, range, dir1, maxThreadStates, doReaderPooling); //System.out.println("TEST: index serial"); indexSerial(r, docs, dir2); //System.out.println("TEST: verify"); verifyEquals(dir1, dir2, "id"); + dir1.close(); + dir2.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestFlex.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestFlex.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestFlex.java (working copy) @@ -31,7 +31,7 @@ // Test non-flex API emulated on flex index public void testNonFlex() throws Exception { - Directory d = new MockRAMDirectory(); + Directory d = newDirectory(newRandom()); final int DOC_COUNT = 177; @@ -65,8 +65,9 @@ } public void testTermOrd() throws Exception { - Directory d = new MockRAMDirectory(); - IndexWriter w = new IndexWriter(d, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, + Random random = newRandom(); + Directory d = newDirectory(random); + IndexWriter w = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard"))); Document doc = new Document(); doc.add(new Field("f", "a b c", Field.Store.NO, Field.Index.ANALYZED)); Index: lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java (revision 984759) +++ 
lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java (working copy) @@ -22,6 +22,7 @@ import java.util.Arrays; import java.util.Iterator; import java.util.Map; +import java.util.Random; import java.util.SortedSet; import org.apache.lucene.analysis.Analyzer; @@ -43,7 +44,7 @@ private String[] testTerms = {"this", "is", "a", "test"}; private int[][] positions = new int[testTerms.length][]; private TermVectorOffsetInfo[][] offsets = new TermVectorOffsetInfo[testTerms.length][]; - private MockRAMDirectory dir = new MockRAMDirectory(); + private MockRAMDirectory dir; private String seg; private FieldInfos fieldInfos = new FieldInfos(); private static int TERM_FREQ = 3; @@ -93,7 +94,9 @@ } Arrays.sort(tokens); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MyAnalyzer()).setMaxBufferedDocs(-1)); + Random random = newRandom(); + dir = newDirectory(random); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MyAnalyzer()).setMaxBufferedDocs(-1)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundDocStore(false); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10); @@ -122,6 +125,12 @@ fieldInfos = new FieldInfos(dir, IndexFileNames.segmentFileName(seg, "", IndexFileNames.FIELD_INFOS_EXTENSION)); } + + @Override + protected void tearDown() throws Exception { + dir.close(); + super.tearDown(); + } private class MyTokenStream extends TokenStream { int tokenUpto; @@ -182,6 +191,7 @@ assertTrue(term.equals(testTerms[i])); } } + reader.close(); } public void testPositionReader() throws IOException { @@ -224,6 +234,7 @@ //System.out.println("Term: " + term); assertTrue(term.equals(testTerms[i])); } + reader.close(); } public void testOffsetReader() throws IOException { @@ -252,6 +263,7 @@ assertTrue(termVectorOffsetInfo.equals(offsets[i][j])); } } + reader.close(); } public void testMapper() throws IOException { @@ -366,37 +378,45 @@ assertEquals(0, docNumAwareMapper.getDocumentNumber()); ir.close(); - + reader.close(); } /** * Make sure exceptions and bad params are handled appropriately */ - public void testBadParams() { + public void testBadParams() throws IOException { + TermVectorsReader reader = null; try { - TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos); + reader = new TermVectorsReader(dir, seg, fieldInfos); //Bad document number, good field number reader.get(50, testFields[0]); fail(); } catch (IOException e) { // expected exception + } finally { + reader.close(); } try { - TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos); + reader = new TermVectorsReader(dir, seg, fieldInfos); //Bad document number, no field reader.get(50); fail(); } catch (IOException e) { // expected exception + } finally { + reader.close(); } try { - TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos); + reader = new TermVectorsReader(dir, seg, fieldInfos); //good document number, bad field number TermFreqVector vector = reader.get(0, "f50"); assertTrue(vector == null); + reader.close(); } catch (IOException e) { fail(); + } finally { + reader.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java (revision 984759) +++ 
lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java (working copy) @@ -18,8 +18,6 @@ */ import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.store.MockRAMDirectory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -27,11 +25,13 @@ import org.apache.lucene.util.BytesRef; import java.io.IOException; +import java.util.Random; public class TestSegmentTermDocs extends LuceneTestCase { private Document testDoc = new Document(); - private Directory dir = new MockRAMDirectory(); + private Directory dir; private SegmentInfo info; + private Random random; public TestSegmentTermDocs(String s) { super(s); @@ -40,9 +40,17 @@ @Override protected void setUp() throws Exception { super.setUp(); + random = newRandom(); + dir = newDirectory(random); DocHelper.setupDoc(testDoc); info = DocHelper.writeDoc(dir, testDoc); } + + @Override + protected void tearDown() throws Exception { + dir.close(); + super.tearDown(); + } public void test() { assertTrue(dir != null); @@ -103,8 +111,8 @@ } public void testSkipTo(int indexDivisor) throws IOException { - Directory dir = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer())); + Directory dir = newDirectory(random); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Term ta = new Term("content","aaa"); for(int i = 0; i < 10; i++) @@ -248,7 +256,6 @@ } public void testIndexDivisor() throws IOException { - dir = new MockRAMDirectory(); testDoc = new Document(); DocHelper.setupDoc(testDoc); DocHelper.writeDoc(dir, testDoc); Index: lucene/src/test/org/apache/lucene/index/TestIndexWriter.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriter.java (working copy) @@ -91,7 +91,7 @@ } public void testDocCount() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = null; IndexReader reader = null; @@ -151,6 +151,7 @@ assertEquals(0, writer.maxDoc()); assertEquals(0, writer.numDocs()); writer.close(); + dir.close(); } private static void addDoc(IndexWriter writer) throws IOException @@ -187,7 +188,7 @@ Directory[] dirs = new Directory[NUM_DIR]; long inputDiskUsage = 0; for(int i=0;i thrown = new ArrayList(); - - final IndexWriter writer = new IndexWriter(new MockRAMDirectory(), + final Directory dir = newDirectory(random); + final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())) { @Override public void message(final String message) { @@ -4062,11 +4088,12 @@ // throws IllegalStateEx w/o bug fix writer.close(); + dir.close(); } // LUCENE-1442 public void testDoubleOffsetCounting() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); @@ -4102,7 +4129,7 @@ // LUCENE-1442 public void testDoubleOffsetCounting2() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, 
newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); Field f = new Field("field", "abcd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS); @@ -4124,7 +4151,7 @@ // LUCENE-1448 public void testEndOffsetPositionCharAnalyzer() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); Field f = new Field("field", "abcd ", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS); @@ -4146,7 +4173,7 @@ // LUCENE-1448 public void testEndOffsetPositionWithCachingTokenFilter() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer)); Document doc = new Document(); @@ -4170,7 +4197,7 @@ // LUCENE-1448 public void testEndOffsetPositionStopFilter() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true))); Document doc = new Document(); @@ -4193,7 +4220,7 @@ // LUCENE-1448 public void testEndOffsetPositionStandard() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); @@ -4224,7 +4251,7 @@ // LUCENE-1448 public void testEndOffsetPositionStandardEmptyField() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); @@ -4252,7 +4279,7 @@ // LUCENE-1448 public void testEndOffsetPositionStandardEmptyField2() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); @@ -4306,7 +4333,7 @@ } public void testDeadlock() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); Document doc = new Document(); doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, @@ -4317,7 +4344,7 @@ writer.commit(); // index has 2 segments - MockRAMDirectory dir2 = new MockRAMDirectory(); + MockRAMDirectory dir2 = newDirectory(random); IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer2.addDocument(doc); writer2.close(); @@ -4346,7 +4373,10 @@ @Override public void run() { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir; + try { + dir = newDirectory(random); + } catch (IOException e) { throw new RuntimeException(e); } IndexWriter w = null; boolean first = true; while(!finish) { @@ -4442,6 +4472,7 @@ 
e.printStackTrace(System.out); } } + dir.close(); } } @@ -4472,7 +4503,7 @@ public void testIndexStoreCombos() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); byte[] b = new byte[50]; for(int i=0;i<50;i++) @@ -4535,7 +4566,7 @@ // LUCENE-1727: make sure doc fields are stored in order public void testStoredFieldsOrder() throws Throwable { - Directory d = new MockRAMDirectory(); + Directory d = newDirectory(random); IndexWriter w = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("zzz", "a b c", Field.Store.YES, Field.Index.NO)); @@ -4567,7 +4598,7 @@ public void testEmbeddedFFFF() throws Throwable { - Directory d = new MockRAMDirectory(); + Directory d = newDirectory(random); IndexWriter w = new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED)); @@ -4584,7 +4615,7 @@ } public void testNoDocsIndex() throws Throwable { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); @@ -4605,7 +4636,7 @@ public void testCommitThreadSafety() throws Throwable { final int NUM_THREADS = 5; final double RUN_SEC = 0.5; - final Directory dir = new MockRAMDirectory(); + final Directory dir = newDirectory(random); final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); w.commit(); @@ -4713,7 +4744,7 @@ // sort in codepoint sort order by default public void testTermUTF16SortOrder() throws Throwable { Random rnd = random; - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); RandomIndexWriter writer = new RandomIndexWriter(rnd, dir); Document d = new Document(); // Single segment @@ -4777,7 +4808,7 @@ } public void testIndexDivisor() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, new MockAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED); StringBuilder s = new StringBuilder(); // must be > 256 @@ -4806,7 +4837,7 @@ public void testDeleteUnusedFiles() throws Exception { for(int iter=0;iter<2;iter++) { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); ((LogMergePolicy) w.getMergePolicy()).setUseCompoundFile(true); Document doc = new Document(); @@ -4865,7 +4896,7 @@ public void testDeleteUnsedFiles2() throws Exception { // Validates that iw.deleteUnusedFiles() also deletes unused index commits // in case a deletion policy which holds onto commits is used. 
- Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) @@ -4894,6 +4925,7 @@ assertEquals(1, IndexReader.listCommits(dir).size()); writer.close(); + dir.close(); } private static class FlushCountingIndexWriter extends IndexWriter { @@ -4909,7 +4941,7 @@ public void testIndexingThenDeleting() throws Exception { final Random r = random; - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); FlushCountingIndexWriter w = new FlushCountingIndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMaxBufferedDeleteTerms(-1)); //w.setInfoStream(System.out); Document doc = new Document(); @@ -4944,7 +4976,7 @@ // Tests that if we don't call commit(), the directory has 0 commits. This has // changed since LUCENE-2386, where before IW would always commit on a fresh // new index. - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); try { IndexReader.listCommits(dir); @@ -4955,6 +4987,7 @@ // No changes still should generate a commit, because it's a new index. writer.close(); assertEquals("expected 1 commits!", 1, IndexReader.listCommits(dir).size()); + dir.close(); } public void testEmptyFSDirWithNoLock() throws Exception { @@ -4969,7 +5002,7 @@ // Tests that if IW is created over an empty Directory, some documents are // indexed, flushed (but not committed) and then IW rolls back, then no // files are left in the Directory. 
- Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(2)); @@ -4995,6 +5028,7 @@ // Since we rolled-back above, that close should be a no-op writer.close(); assertEquals("expected a no-op close after IW.rollback()", 0, dir.listAll().length); + dir.close(); } public void testNoSegmentFile() throws IOException { @@ -5023,7 +5057,7 @@ } public void testFutureCommit() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); Document doc = new Document(); @@ -5197,7 +5231,7 @@ // LUCENE-2593 public void testCorruptionAfterDiskFullDuringMerge() throws IOException { - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); final Random rand = random; //IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderPooling(true)); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(rand, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new SerialMergeScheduler()).setReaderPooling(true)); Index: lucene/src/test/org/apache/lucene/index/TestCheckIndex.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestCheckIndex.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestCheckIndex.java (working copy) @@ -22,6 +22,7 @@ import java.io.PrintStream; import java.util.List; import java.util.ArrayList; +import java.util.Random; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.store.MockRAMDirectory; @@ -33,8 +34,9 @@ public class TestCheckIndex extends LuceneTestCase { public void testDeletedDocs() throws IOException { - MockRAMDirectory dir = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); Document doc = new Document(); doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); for(int i=0;i<19;i++) { @@ -87,6 +89,7 @@ onlySegments.add("_0"); assertTrue(checker.checkIndex(onlySegments).clean == true); + dir.close(); } public void testLuceneConstantVersion() throws IOException { Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (working copy) @@ -130,7 +130,7 @@ public void testRandomExceptions() throws Throwable { Random random = newRandom(); - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler())); @@ -166,12 +166,13 @@ r2.close(); _TestUtil.checkIndex(dir); + 
dir.close(); } public void testRandomExceptionsThreads() throws Throwable { - - MockRAMDirectory dir = new MockRAMDirectory(); - MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()) + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler())); ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions(); //writer.setMaxBufferedDocs(10); @@ -213,5 +214,6 @@ r2.close(); _TestUtil.checkIndex(dir); + dir.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestTransactions.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestTransactions.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestTransactions.java (working copy) @@ -195,8 +195,8 @@ public void testTransactions() throws Throwable { RANDOM = newRandom(); - MockRAMDirectory dir1 = new MockRAMDirectory(); - MockRAMDirectory dir2 = new MockRAMDirectory(); + MockRAMDirectory dir1 = newDirectory(RANDOM); + MockRAMDirectory dir2 = newDirectory(RANDOM); dir1.setPreventDoubleWrite(false); dir2.setPreventDoubleWrite(false); dir1.failOn(new RandomFailure()); @@ -225,5 +225,7 @@ for(int i=0;i= 157); + reader.close(); + dir.close(); } public void testCrashAfterClose() throws IOException { @@ -127,6 +133,8 @@ IndexReader reader = IndexReader.open(dir, false); assertEquals(157, reader.numDocs()); + reader.close(); + dir.close(); } public void testCrashAfterCloseNoWait() throws IOException { @@ -146,6 +154,8 @@ */ IndexReader reader = IndexReader.open(dir, false); assertEquals(157, reader.numDocs()); + reader.close(); + dir.close(); } public void testCrashReaderDeletes() throws IOException { @@ -167,6 +177,8 @@ */ reader = IndexReader.open(dir, false); assertEquals(157, reader.numDocs()); + reader.close(); + dir.close(); } public void testCrashReaderDeletesAfterClose() throws IOException { @@ -189,5 +201,7 @@ */ reader = IndexReader.open(dir, false); assertEquals(156, reader.numDocs()); + reader.close(); + dir.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestCodecs.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestCodecs.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestCodecs.java (working copy) @@ -278,7 +278,7 @@ final FieldData field = new FieldData("field", fieldInfos, terms, true, false); final FieldData[] fields = new FieldData[] {field}; - final Directory dir = new MockRAMDirectory(); + final Directory dir = newDirectory(RANDOM); this.write(fieldInfos, dir, fields); final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, CodecProvider.getDefault().getWriter(null)); si.setHasProx(false); @@ -311,6 +311,8 @@ } assertNull(fieldsEnum.next()); + reader.close(); + dir.close(); } public void testRandomPostings() throws Throwable { @@ -326,7 +328,7 @@ fields[i] = new FieldData(fieldNames[i], fieldInfos, this.makeRandomTerms(omitTF, storePayloads), omitTF, storePayloads); } - final Directory dir = new MockRAMDirectory(); + final Directory dir = newDirectory(RANDOM); this.write(fieldInfos, dir, fields); final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, -1, SEGMENT, false, true, 
CodecProvider.getDefault().getWriter(null)); @@ -352,8 +354,9 @@ } public void testSepPositionAfterMerge() throws IOException { - final Directory dir = new MockRAMDirectory(); - final IndexWriterConfig config = newIndexWriterConfig(newRandom(), Version.LUCENE_31, + Random random = newRandom(); + final Directory dir = newDirectory(random); + final IndexWriterConfig config = newIndexWriterConfig(random, Version.LUCENE_31, new MockAnalyzer()); config.setCodecProvider(new MockSepCodecs()); final IndexWriter writer = new IndexWriter(dir, config); Index: lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (working copy) @@ -61,12 +61,12 @@ // Make sure running BG merges still work fine even when // we are hitting exceptions during flushing. public void testFlushExceptions() throws IOException { - - MockRAMDirectory directory = new MockRAMDirectory(); + Random random = newRandom(); + MockRAMDirectory directory = newDirectory(random); FailOnlyOnFlush failure = new FailOnlyOnFlush(); directory.failOn(failure); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); Document doc = new Document(); Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED); doc.add(idField); @@ -107,15 +107,15 @@ // Test that deletes committed after a merge started and // before it finishes, are correctly merged back: public void testDeleteMerging() throws IOException { + Random random = newRandom(); + MockRAMDirectory directory = newDirectory(random); - MockRAMDirectory directory = new MockRAMDirectory(); - LogDocMergePolicy mp = new LogDocMergePolicy(); // Force degenerate merging so we can get a mix of // merging of segments with and without deletes at the // start: mp.setMinMergeDocs(1000); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(newRandom(), + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMergePolicy(mp)); @@ -146,9 +146,8 @@ } public void testNoExtraFiles() throws IOException { - - MockRAMDirectory directory = new MockRAMDirectory(); Random random = newRandom(); + MockRAMDirectory directory = newDirectory(random); IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(2)); @@ -176,8 +175,8 @@ } public void testNoWaitClose() throws IOException { - MockRAMDirectory directory = new MockRAMDirectory(); Random random = newRandom(); + MockRAMDirectory directory = newDirectory(random); Document doc = new Document(); Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED); doc.add(idField); Index: lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestThreadedOptimize.java (working copy) @@ -139,7 +139,7 @@ */ public void testThreadedOptimize() throws Exception { Random random = 
newRandom(); - Directory directory = new MockRAMDirectory(); + Directory directory = newDirectory(random); runTest(random, directory, new SerialMergeScheduler()); runTest(random, directory, new ConcurrentMergeScheduler()); directory.close(); Index: lucene/src/test/org/apache/lucene/index/TestOmitTf.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestOmitTf.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestOmitTf.java (working copy) @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Collection; +import java.util.Random; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util._TestUtil; @@ -29,12 +30,18 @@ import org.apache.lucene.search.*; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.search.Explanation.IDFExplanation; public class TestOmitTf extends LuceneTestCase { - + private Random random; + + @Override + public void setUp() throws Exception { + super.setUp(); + random = newRandom(); + } + public static class SimpleSimilarity extends Similarity { @Override public float lengthNorm(String field, int numTerms) { return 1.0f; } @Override public float queryNorm(float sumOfSquaredWeights) { return 1.0f; } @@ -59,9 +66,9 @@ // Tests whether the DocumentWriter correctly enable the // omitTermFreqAndPositions bit in the FieldInfo public void testOmitTermFreqAndPositions() throws Exception { - Directory ram = new MockRAMDirectory(); + Directory ram = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); - IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, analyzer)); + IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer)); Document d = new Document(); // this field will have Tf @@ -106,9 +113,9 @@ // Tests whether merging of docs that have different // omitTermFreqAndPositions for the same field works public void testMixedMerge() throws Exception { - Directory ram = new MockRAMDirectory(); + Directory ram = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); - IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(), + IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); Document d = new Document(); @@ -159,9 +166,9 @@ // field X, then adding docs that do omitTermFreqAndPositions for that same // field, public void testMixedRAM() throws Exception { - Directory ram = new MockRAMDirectory(); + Directory ram = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); - IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(), + IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); Document d = new Document(); @@ -207,9 +214,9 @@ // Verifies no *.prx exists when all fields omit term freq: public void testNoPrxFile() throws Throwable { - Directory ram = new MockRAMDirectory(); + Directory ram = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); - IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(newRandom(), + IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(random, 
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3)); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); lmp.setMergeFactor(2); @@ -240,9 +247,9 @@ // Test scores with one field with Term Freqs and one without, otherwise with equal content public void testBasic() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(2) .setSimilarity(new SimpleSimilarity())); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (working copy) @@ -18,24 +18,31 @@ */ import java.io.IOException; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util._TestUtil; import org.apache.lucene.util.LuceneTestCase; public class TestIndexWriterMergePolicy extends LuceneTestCase { - + private Random random; + + @Override + public void setUp() throws Exception { + super.setUp(); + random = newRandom(); + } + // Test the normal case public void testNormalCase() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy())); @@ -45,13 +52,14 @@ } writer.close(); + dir.close(); } // Test to see if there is over merge public void testNoOverMerge() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy())); @@ -66,16 +74,17 @@ assertTrue(noOverMerge); writer.close(); + dir.close(); } // Test the case where flush is forced after every addDoc public void testForceFlush() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); LogDocMergePolicy mp = new LogDocMergePolicy(); mp.setMinMergeDocs(100); mp.setMergeFactor(10); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(10).setMergePolicy(mp)); @@ -85,7 +94,7 @@ mp = new LogDocMergePolicy(); mp.setMergeFactor(10); - writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, + writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode( OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(mp)); mp.setMinMergeDocs(100); @@ -93,13 
+102,14 @@ } writer.close(); + dir.close(); } // Test the case where mergeFactor changes public void testMergeFactorChange() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy())); @@ -118,13 +128,14 @@ checkInvariants(writer); writer.close(); + dir.close(); } // Test the case where both mergeFactor and maxBufferedDocs change public void testMaxBufferedDocsChange() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())); @@ -137,7 +148,7 @@ } writer.close(); - writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, + writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode( OpenMode.APPEND).setMaxBufferedDocs(101).setMergePolicy( new LogDocMergePolicy())); @@ -146,9 +157,9 @@ writer.close(); LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.setMergeFactor(10); - writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, + writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode( - OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp)); + OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler())); // merge policy only fixes segments on levels where merges // have been triggered, so check invariants after all adds @@ -166,15 +177,16 @@ checkInvariants(writer); writer.close(); + dir.close(); } // Test the case where a merge results in no doc at all public void testMergeDocCount0() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.setMergeFactor(100); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(10).setMergePolicy(ldmp)); @@ -190,9 +202,9 @@ ldmp = new LogDocMergePolicy(); ldmp.setMergeFactor(5); - writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, + writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode( - OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp)); + OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler())); // merge factor is changed, so check invariants after all adds for (int i = 0; i < 10; i++) { @@ -205,6 +217,7 @@ assertEquals(10, writer.maxDoc()); writer.close(); + dir.close(); } private void addDoc(IndexWriter writer) throws IOException { Index: lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestNoDeletionPolicy.java (working copy) @@ -23,6 +23,7 @@ import 
java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Arrays; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -73,8 +74,9 @@ @Test public void testAllCommitsRemain() throws Exception { - Directory dir = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), + Random random = newRandom(); + Directory dir = newDirectory(random); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); for (int i = 0; i < 10; i++) { @@ -85,6 +87,7 @@ assertEquals("wrong number of commits !", i + 1, IndexReader.listCommits(dir).size()); } writer.close(); + dir.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestIndexCommit.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexCommit.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexCommit.java (working copy) @@ -24,7 +24,6 @@ import java.util.Map; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCaseJ4; import org.junit.Test; @@ -33,7 +32,7 @@ @Test public void testEqualsHashCode() throws Exception { // LUCENE-2417: equals and hashCode() impl was inconsistent - final Directory dir = new MockRAMDirectory(); + final Directory dir = newDirectory(newRandom()); IndexCommit ic1 = new IndexCommit() { @Override public String getSegmentsFileName() { return "a"; } @@ -63,5 +62,6 @@ assertEquals(ic1, ic2); assertEquals("hash codes are not equals", ic1.hashCode(), ic2.hashCode()); + dir.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestFieldInfos.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestFieldInfos.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestFieldInfos.java (working copy) @@ -47,7 +47,7 @@ fieldInfos.add(testDoc); //Since the complement is stored as well in the fields map assertTrue(fieldInfos.size() == DocHelper.all.size()); //this is all b/c we are using the no-arg constructor - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(newRandom()); String name = "testFile"; IndexOutput output = dir.createOutput(name); assertTrue(output != null); Index: lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestDirectoryReader.java (working copy) @@ -36,6 +36,7 @@ private Document doc2; protected SegmentReader [] readers = new SegmentReader[2]; protected SegmentInfos sis; + private Random random; public TestDirectoryReader(String s) { @@ -45,7 +46,8 @@ @Override protected void setUp() throws Exception { super.setUp(); - dir = new MockRAMDirectory(); + random = newRandom(); + dir = newDirectory(random); doc1 = new Document(); doc2 = new Document(); DocHelper.setupDoc(doc1); @@ -55,6 +57,14 @@ sis = new SegmentInfos(); sis.read(dir); } + + @Override + protected void tearDown() throws Exception { + if (readers[0] != null) readers[0].close(); + if (readers[1] != null) readers[1].close(); + dir.close(); + super.tearDown(); + } protected IndexReader openReader() throws IOException { 
IndexReader reader; @@ -86,6 +96,7 @@ TermFreqVector vector = reader.getTermFreqVector(0, DocHelper.TEXT_FIELD_2_KEY); assertTrue(vector != null); TestSegmentReader.checkNorms(reader); + reader.close(); } public void doTestUndeleteAll() throws IOException { @@ -122,13 +133,13 @@ sis.read(dir); reader = openReader(); assertEquals( 1, reader.numDocs() ); + reader.close(); } public void testIsCurrent() throws IOException { - Random random = newRandom(); - MockRAMDirectory ramDir1=new MockRAMDirectory(); + MockRAMDirectory ramDir1=newDirectory(random); addDoc(random, ramDir1, "test foo", true); - MockRAMDirectory ramDir2=new MockRAMDirectory(); + MockRAMDirectory ramDir2=newDirectory(random); addDoc(random, ramDir2, "test blah", true); IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false)}; MultiReader mr = new MultiReader(readers); @@ -144,15 +155,16 @@ // expected exception } mr.close(); + ramDir1.close(); + ramDir2.close(); } public void testMultiTermDocs() throws IOException { - Random random = newRandom(); - MockRAMDirectory ramDir1=new MockRAMDirectory(); + MockRAMDirectory ramDir1=newDirectory(random); addDoc(random, ramDir1, "test foo", true); - MockRAMDirectory ramDir2=new MockRAMDirectory(); + MockRAMDirectory ramDir2=newDirectory(random); addDoc(random, ramDir2, "test blah", true); - MockRAMDirectory ramDir3=new MockRAMDirectory(); + MockRAMDirectory ramDir3=newDirectory(random); addDoc(random, ramDir3, "test wow", true); IndexReader[] readers1 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir3, false)}; @@ -182,6 +194,14 @@ // really a dummy assert to ensure that we got some docs and to ensure that // nothing is optimized out. assertTrue(ret > 0); + readers1[0].close(); + readers1[1].close(); + readers2[0].close(); + readers2[1].close(); + readers2[2].close(); + ramDir1.close(); + ramDir2.close(); + ramDir3.close(); } private void addDoc(Random random, MockRAMDirectory ramDir1, String s, boolean create) throws IOException { Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java (working copy) @@ -49,7 +49,7 @@ "Venice has lots of canals" }; String[] text = { "Amsterdam", "Venice" }; - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); @@ -84,7 +84,7 @@ // test when delete terms only apply to disk segments public void testNonRAMDelete() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); @@ -118,7 +118,7 @@ } public void testMaxBufferedDeletes() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); writer.deleteDocuments(new Term("foobar", "1")); @@ -132,7 +132,7 @@ // test when delete terms 
only apply to ram segments public void testRAMDeletes() throws IOException { for(int t=0;t<2;t++) { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4) .setMaxBufferedDeleteTerms(4)); @@ -173,7 +173,7 @@ // test when delete terms apply to both disk and ram segments public void testBothDeletes() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100) .setMaxBufferedDeleteTerms(100)); @@ -201,11 +201,13 @@ IndexReader reader = IndexReader.open(dir, true); assertEquals(5, reader.numDocs()); modifier.close(); + reader.close(); + dir.close(); } // test that batched delete terms are flushed together public void testBatchDeletes() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); @@ -248,7 +250,7 @@ // test deleteAll() public void testDeleteAll() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); @@ -294,7 +296,7 @@ // test rollback of deleteAll() public void testDeleteAllRollback() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); @@ -331,7 +333,7 @@ // test deleteAll() w/ near real-time reader public void testDeleteAllNRT() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); @@ -421,7 +423,7 @@ int END_COUNT = 144; // First build up a starting index: - MockRAMDirectory startDir = new MockRAMDirectory(); + MockRAMDirectory startDir = newDirectory(random); IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); for (int i = 0; i < 157; i++) { Document d = new Document(); @@ -587,17 +589,16 @@ + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT); } } - searcher.close(); newReader.close(); - + dir.close(); if (result2 == END_COUNT) { break; } } + modifier.close(); + startDir.close(); - dir.close(); - // Try again with 10 more bytes of free space: diskFree += 10; } @@ -653,7 +654,7 @@ "Venice has lots of canals" }; String[] text = { "Amsterdam", "Venice" }; - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, 
false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false)); LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy(); @@ -763,7 +764,7 @@ "Venice has lots of canals" }; String[] text = { "Amsterdam", "Venice" }; - MockRAMDirectory dir = new MockRAMDirectory(); + MockRAMDirectory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); modifier.commit(); dir.failOn(failure.reset()); @@ -787,10 +788,11 @@ TestIndexWriter.assertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files"); modifier.close(); + dir.close(); } public void testDeleteNullQuery() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter modifier = new IndexWriter(dir, new MockAnalyzer(MockTokenizer.WHITESPACE, false), IndexWriter.MaxFieldLength.UNLIMITED); for (int i = 0; i < 5; i++) { Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java (working copy) @@ -70,7 +70,7 @@ public void testUpdateDocument() throws Exception { boolean optimize = true; - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); // create the index @@ -131,7 +131,7 @@ public void testAddIndexes() throws Exception { boolean optimize = false; - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.setInfoStream(infoStream); // create the index @@ -139,7 +139,7 @@ writer.flush(false, true, true); // create a 2nd index - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer2.setInfoStream(infoStream); createIndexNoClose(!optimize, "index2", writer2); @@ -171,17 +171,18 @@ r1.close(); writer.close(); dir1.close(); + dir2.close(); } public void testAddIndexes2() throws Exception { boolean optimize = false; - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.setInfoStream(infoStream); // create a 2nd index - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer2.setInfoStream(infoStream); createIndexNoClose(!optimize, "index2", writer2); @@ -199,6 +200,7 @@ r1.close(); writer.close(); dir1.close(); + dir2.close(); } /** @@ -209,7 +211,7 @@ public void testDeleteFromIndexWriter() throws Exception { boolean optimize = true; - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2)); writer.setInfoStream(infoStream); // create the index @@ -261,7 +263,7 @@ final int numIter = 2; int 
numDirs = 3; - Directory mainDir = new MockRAMDirectory(); + Directory mainDir = newDirectory(random); IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); mainWriter.setInfoStream(infoStream); AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter); @@ -304,7 +306,7 @@ public AddDirectoriesThreads(int numDirs, IndexWriter mainWriter) throws Throwable { this.numDirs = numDirs; this.mainWriter = mainWriter; - addDir = new MockRAMDirectory(); + addDir = newDirectory(random); IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)); for (int i = 0; i < NUM_INIT_DOCS; i++) { Document doc = createDocument(i, "addindex", 4); @@ -410,7 +412,7 @@ * IW.getReader */ public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.setInfoStream(infoStream); IndexReader r1 = writer.getReader(); @@ -521,7 +523,7 @@ public void testMergeWarmer() throws Exception { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); // Enroll warmer MyWarmer warmer = new MyWarmer(); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, @@ -556,7 +558,7 @@ } public void testAfterCommit() throws Exception { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler())); writer.commit(); writer.setInfoStream(infoStream); @@ -589,7 +591,7 @@ // Make sure reader remains usable even if IndexWriter closes public void testAfterClose() throws Exception { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.setInfoStream(infoStream); @@ -618,7 +620,7 @@ // Stress test reopen during addIndexes public void testDuringAddIndexes() throws Exception { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.setInfoStream(infoStream); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); @@ -695,7 +697,7 @@ // Stress test reopen during add/delete public void testDuringAddDelete() throws Exception { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); final IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); writer.setInfoStream(infoStream); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); @@ -775,7 +777,7 @@ } public void testExpungeDeletes() throws Throwable { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED)); @@ -799,7 +801,7 @@ } public void testDeletesNumDocs() throws Throwable { - Directory dir = new 
MockRAMDirectory(); + Directory dir = newDirectory(random); final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED)); @@ -829,16 +831,17 @@ public void testEmptyIndex() throws Exception { // Ensures that getReader works on an empty index, which hasn't been committed yet. - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); IndexReader r = w.getReader(); assertEquals(0, r.numDocs()); r.close(); w.close(); + dir.close(); } public void testSegmentWarmer() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(2).setReaderPooling(true)); ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(10); Index: lucene/src/test/org/apache/lucene/index/TestFieldsReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestFieldsReader.java (working copy) @@ -45,7 +45,7 @@ import org.apache.lucene.util._TestUtil; public class TestFieldsReader extends LuceneTestCase { - private MockRAMDirectory dir = new MockRAMDirectory(); + private MockRAMDirectory dir; private Document testDoc = new Document(); private FieldInfos fieldInfos = null; private Random random; @@ -62,6 +62,7 @@ DocHelper.setupDoc(testDoc); fieldInfos.add(testDoc); random = newRandom(); + dir = newDirectory(random); IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()); ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false); ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundDocStore(false); @@ -70,6 +71,11 @@ writer.close(); } + @Override + protected void tearDown() throws Exception { + dir.close(); + super.tearDown(); + } public void test() throws IOException { assertTrue(dir != null); assertTrue(fieldInfos != null); @@ -157,6 +163,7 @@ assertTrue("byte[" + i + "] is mismatched", bytes[i] == DocHelper.LAZY_FIELD_BINARY_BYTES[i]); } + reader.close(); } public void testLatentFields() throws Exception { @@ -226,6 +233,7 @@ assertTrue("byte[" + i + "] is mismatched", bytes[i] == DocHelper.LAZY_FIELD_BINARY_BYTES[i]); } + reader.close(); } @@ -278,6 +286,7 @@ count++; } assertTrue(count + " does not equal: " + 1, count == 1); + reader.close(); } /** Index: lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (working copy) @@ -31,7 +31,7 @@ AtomicInteger seq = new AtomicInteger(1); public void testIndexing() throws Exception { - Directory mainDir = new MockRAMDirectory(); + Directory mainDir = newDirectory(random); IndexWriter writer = new IndexWriter(mainDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setUseCompoundFile(false); Index: 
lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestTransactionRollback.java (working copy) @@ -124,7 +124,7 @@ @Override protected void setUp() throws Exception { super.setUp(); - dir = new MockRAMDirectory(); + dir = newDirectory(random); random = newRandom(); //Build index, of records 1 to 100, committing after each batch of 10 IndexDeletionPolicy sdp=new KeepAllDeletionPolicy(); @@ -143,6 +143,12 @@ w.close(); } + + @Override + protected void tearDown() throws Exception { + dir.close(); + super.tearDown(); + } // Rolls back to previous commit point class RollbackDeletionPolicy implements IndexDeletionPolicy { Index: lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestFilterIndexReader.java (working copy) @@ -132,7 +132,7 @@ */ public void testFilterIndexReader() throws Exception { Random random = newRandom(); - MockRAMDirectory directory = new MockRAMDirectory(); + MockRAMDirectory directory = newDirectory(random); IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document d1 = new Document(); @@ -150,7 +150,7 @@ writer.close(); //IndexReader reader = new TestReader(IndexReader.open(directory, true)); - MockRAMDirectory target = new MockRAMDirectory(); + MockRAMDirectory target = newDirectory(random); writer = new IndexWriter(target, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); IndexReader reader = new TestReader(IndexReader.open(directory, true)); writer.addIndexes(reader); @@ -176,5 +176,6 @@ reader.close(); directory.close(); + target.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (working copy) @@ -41,7 +41,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.BitVector; @@ -51,8 +50,8 @@ private File indexDir; public void testReopen() throws Exception { - final Directory dir1 = new MockRAMDirectory(); Random random = newRandom(); + final Directory dir1 = newDirectory(random); createIndex(random, dir1, false); performDefaultTests(new TestReopen() { @@ -70,7 +69,7 @@ }); dir1.close(); - final Directory dir2 = new MockRAMDirectory(); + final Directory dir2 = newDirectory(random); createIndex(random, dir2, true); performDefaultTests(new TestReopen() { @@ -91,9 +90,9 @@ public void testParallelReaderReopen() throws Exception { Random random = newRandom(); - final Directory dir1 = new MockRAMDirectory(); + final Directory dir1 = newDirectory(random); createIndex(random, dir1, true); - final Directory dir2 = new MockRAMDirectory(); + final Directory dir2 = newDirectory(random); createIndex(random, dir2, true); performDefaultTests(new 
TestReopen() { @@ -116,9 +115,9 @@ dir1.close(); dir2.close(); - final Directory dir3 = new MockRAMDirectory(); + final Directory dir3 = newDirectory(random); createIndex(random, dir3, true); - final Directory dir4 = new MockRAMDirectory(); + final Directory dir4 = newDirectory(random); createIndex(random, dir4, true); performTestsWithExceptionInReopen(new TestReopen() { @@ -161,13 +160,16 @@ dir.close(); } public void testCommitReopenRAM () throws IOException { - Directory dir = new MockRAMDirectory(); - doTestReopenWithCommit(newRandom(), dir, true); + Random random = newRandom(); + Directory dir = newDirectory(random); + doTestReopenWithCommit(random, dir, true); dir.close(); } public void testCommitRecreateRAM () throws IOException { - Directory dir = new MockRAMDirectory(); - doTestReopenWithCommit(newRandom(), dir, false); + Random random = newRandom(); + Directory dir = newDirectory(random); + doTestReopenWithCommit(random, dir, false); + dir.close(); } private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException { @@ -216,10 +218,10 @@ public void testMultiReaderReopen() throws Exception { Random random = newRandom(); - final Directory dir1 = new MockRAMDirectory(); + final Directory dir1 = newDirectory(random); createIndex(random, dir1, true); - final Directory dir2 = new MockRAMDirectory(); + final Directory dir2 = newDirectory(random); createIndex(random, dir2, true); performDefaultTests(new TestReopen() { @@ -242,10 +244,10 @@ dir1.close(); dir2.close(); - final Directory dir3 = new MockRAMDirectory(); + final Directory dir3 = newDirectory(random); createIndex(random, dir3, true); - final Directory dir4 = new MockRAMDirectory(); + final Directory dir4 = newDirectory(random); createIndex(random, dir4, true); performTestsWithExceptionInReopen(new TestReopen() { @@ -273,15 +275,15 @@ public void testMixedReaders() throws Exception { Random random = newRandom(); - final Directory dir1 = new MockRAMDirectory(); + final Directory dir1 = newDirectory(random); createIndex(random, dir1, true); - final Directory dir2 = new MockRAMDirectory(); + final Directory dir2 = newDirectory(random); createIndex(random, dir2, true); - final Directory dir3 = new MockRAMDirectory(); + final Directory dir3 = newDirectory(random); createIndex(random, dir3, false); - final Directory dir4 = new MockRAMDirectory(); + final Directory dir4 = newDirectory(random); createIndex(random, dir4, true); - final Directory dir5 = new MockRAMDirectory(); + final Directory dir5 = newDirectory(random); createIndex(random, dir5, false); performDefaultTests(new TestReopen() { @@ -362,7 +364,7 @@ public void testReferenceCounting() throws IOException { Random random = newRandom(); for (int mode = 0; mode < 4; mode++) { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); createIndex(random, dir1, true); IndexReader reader0 = IndexReader.open(dir1, false); @@ -468,9 +470,9 @@ public void testReferenceCountingMultiReader() throws IOException { Random random = newRandom(); for (int mode = 0; mode <=1; mode++) { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); createIndex(random, dir1, false); - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); createIndex(random, dir2, true); IndexReader reader1 = IndexReader.open(dir1, false); @@ -540,9 +542,9 @@ public void testReferenceCountingParallelReader() throws IOException { Random random = newRandom(); for (int mode = 0; mode <=1; mode++) { - 
Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); createIndex(random, dir1, false); - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); createIndex(random, dir2, true); IndexReader reader1 = IndexReader.open(dir1, false); @@ -614,8 +616,9 @@ } public void testNormsRefCounting() throws IOException { - Directory dir1 = new MockRAMDirectory(); - createIndex(newRandom(), dir1, false); + Random random = newRandom(); + Directory dir1 = newDirectory(random); + createIndex(random, dir1, false); IndexReader reader1 = IndexReader.open(dir1, false); SegmentReader segmentReader1 = SegmentReader.getOnlySegmentReader(reader1); @@ -704,9 +707,9 @@ } public void testThreadSafety() throws Exception { - final Directory dir = new MockRAMDirectory(); + Random random = newRandom(); + final Directory dir = newDirectory(random); final int n = 30 * RANDOM_MULTIPLIER; - Random random = newRandom(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i = 0; i < n; i++) { @@ -1105,8 +1108,9 @@ } public void testCloseOrig() throws Throwable { - Directory dir = new MockRAMDirectory(); - createIndex(newRandom(), dir, false); + Random random = newRandom(); + Directory dir = newDirectory(random); + createIndex(random, dir, false); IndexReader r1 = IndexReader.open(dir, false); IndexReader r2 = IndexReader.open(dir, false); r2.deleteDocument(0); @@ -1126,8 +1130,9 @@ } public void testDeletes() throws Throwable { - Directory dir = new MockRAMDirectory(); - createIndex(newRandom(), dir, false); // Create an index with a bunch of docs (1 segment) + Random random = newRandom(); + Directory dir = newDirectory(random); + createIndex(random, dir, false); // Create an index with a bunch of docs (1 segment) modifyIndex(0, dir); // Get delete bitVector on 1st segment modifyIndex(5, dir); // Add a doc (2 segments) @@ -1159,8 +1164,9 @@ } public void testDeletes2() throws Throwable { - Directory dir = new MockRAMDirectory(); - createIndex(newRandom(), dir, false); + Random random = newRandom(); + Directory dir = newDirectory(random); + createIndex(random, dir, false); // Get delete bitVector modifyIndex(0, dir); IndexReader r1 = IndexReader.open(dir, false); @@ -1195,8 +1201,9 @@ } public void testReopenOnCommit() throws Throwable { - Directory dir = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(newRandom(), + Random random = newRandom(); + Directory dir = newDirectory(random); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(new KeepAllCommits()).setMaxBufferedDocs(-1)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(10); for(int i=0;i<4;i++) { Index: lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java (working copy) @@ -27,17 +27,18 @@ import java.io.IOException; import java.util.Collection; +import java.util.Random; public class TestSegmentMerger extends LuceneTestCase { //The variables for the new merged segment - private Directory mergedDir = new MockRAMDirectory(); + private Directory mergedDir; private String mergedSegment = "test"; //First segment to be merged - private Directory merge1Dir = new 
MockRAMDirectory(); + private Directory merge1Dir; private Document doc1 = new Document(); private SegmentReader reader1 = null; //Second Segment to be merged - private Directory merge2Dir = new MockRAMDirectory(); + private Directory merge2Dir; private Document doc2 = new Document(); private SegmentReader reader2 = null; @@ -49,6 +50,10 @@ @Override protected void setUp() throws Exception { super.setUp(); + Random random = newRandom(); + mergedDir = newDirectory(random); + merge1Dir = newDirectory(random); + merge2Dir = newDirectory(random); DocHelper.setupDoc(doc1); SegmentInfo info1 = DocHelper.writeDoc(merge1Dir, doc1); DocHelper.setupDoc(doc2); @@ -56,6 +61,16 @@ reader1 = SegmentReader.get(true, info1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR); reader2 = SegmentReader.get(true, info2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR); } + + @Override + protected void tearDown() throws Exception { + reader1.close(); + reader2.close(); + mergedDir.close(); + merge1Dir.close(); + merge2Dir.close(); + super.tearDown(); + } public void test() { assertTrue(mergedDir != null); @@ -118,5 +133,6 @@ } TestSegmentReader.checkNorms(mergedReader); + mergedReader.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestAddIndexes.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestAddIndexes.java (working copy) @@ -44,10 +44,10 @@ public void testSimpleCase() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // two auxiliary directories - Directory aux = new MockRAMDirectory(); - Directory aux2 = new MockRAMDirectory(); + Directory aux = newDirectory(random); + Directory aux2 = newDirectory(random); IndexWriter writer = null; @@ -89,7 +89,7 @@ verifyNumDocs(dir, 190); // now add another set in. 
- Directory aux3 = new MockRAMDirectory(); + Directory aux3 = newDirectory(random); writer = newWriter(aux3, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); // add 40 documents addDocs(writer, 40); @@ -123,7 +123,7 @@ verifyTermDocs(dir, new Term("content", "bbb"), 50); // now add a single document - Directory aux4 = new MockRAMDirectory(); + Directory aux4 = newDirectory(random); writer = newWriter(aux4, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addDocs2(writer, 1); writer.close(); @@ -137,13 +137,18 @@ verifyNumDocs(dir, 231); verifyTermDocs(dir, new Term("content", "bbb"), 51); + dir.close(); + aux.close(); + aux2.close(); + aux3.close(); + aux4.close(); } public void testWithPendingDeletes() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); @@ -178,9 +183,9 @@ public void testWithPendingDeletes2() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); @@ -216,9 +221,9 @@ public void testWithPendingDeletes3() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); IndexWriter writer = newWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND)); @@ -256,9 +261,9 @@ // case 0: add self or exceed maxMergeDocs, expect exception public void testAddSelf() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); IndexWriter writer = null; @@ -293,6 +298,8 @@ // make sure the index is correct verifyNumDocs(dir, 100); + dir.close(); + aux.close(); } // in all the remaining tests, make the doc count of the oldest segment @@ -300,9 +307,9 @@ // case 1: no tail segments public void testNoTailSegments() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); @@ -319,14 +326,16 @@ // make sure the index is correct verifyNumDocs(dir, 1040); + dir.close(); + aux.close(); } // case 2: tail segments, invariants hold, no copy public void testNoCopySegments() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); @@ -341,14 +350,16 @@ // make sure the index is correct verifyNumDocs(dir, 1032); + dir.close(); + aux.close(); } // case 3: tail segments, invariants hold, copy, invariants hold public void 
testNoMergeAfterCopy() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); @@ -364,14 +375,16 @@ // make sure the index is correct verifyNumDocs(dir, 1060); + dir.close(); + aux.close(); } // case 4: tail segments, invariants hold, copy, invariants not hold public void testMergeAfterCopy() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); + Directory aux = newDirectory(random); setUpDirs(dir, aux); @@ -391,15 +404,17 @@ assertEquals(1060, writer.maxDoc()); assertEquals(1000, writer.getDocCount(0)); writer.close(); + dir.close(); + aux.close(); } // case 5: tail segments, invariants not hold public void testMoreMerges() throws IOException { // main directory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); // auxiliary directory - Directory aux = new MockRAMDirectory(); - Directory aux2 = new MockRAMDirectory(); + Directory aux = newDirectory(random); + Directory aux2 = newDirectory(random); setUpDirs(dir, aux); @@ -434,6 +449,9 @@ assertEquals(1060, writer.maxDoc()); assertEquals(1000, writer.getDocCount(0)); writer.close(); + dir.close(); + aux.close(); + aux2.close(); } private IndexWriter newWriter(Directory dir, IndexWriterConfig conf) @@ -510,7 +528,7 @@ // LUCENE-1270 public void testHangOnClose() throws IOException { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(); lmp.setUseCompoundFile(false); lmp.setUseCompoundDocStore(false); @@ -538,7 +556,7 @@ writer.addDocument(doc2); writer.close(); - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); lmp = new LogByteSizeMergePolicy(); lmp.setMinMergeMB(0.0001); lmp.setUseCompoundFile(false); Index: lucene/src/test/org/apache/lucene/index/TestStressIndexing.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestStressIndexing.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestStressIndexing.java (working copy) @@ -169,7 +169,7 @@ RANDOM = newRandom(); // With ConcurrentMergeScheduler, in RAMDir - Directory directory = new MockRAMDirectory(); + Directory directory = newDirectory(RANDOM); runStressTest(directory, new ConcurrentMergeScheduler()); directory.close(); Index: lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestTermdocPerf.java (working copy) @@ -57,8 +57,7 @@ public class TestTermdocPerf extends LuceneTestCase { - void addDocs(Directory dir, final int ndocs, String field, final String val, final int maxTF, final float percentDocs) throws IOException { - final Random random = newRandom(); + void addDocs(final Random random, Directory dir, final int ndocs, String field, final String val, final int maxTF, final float percentDocs) throws IOException { final RepeatingTokenStream ts = new RepeatingTokenStream(val); Analyzer analyzer = new Analyzer() { @@ -87,10 +86,11 @@ public int doTest(int iter, int ndocs, int maxTF, float percentDocs) throws IOException { - Directory 
dir = new MockRAMDirectory(); + Random random = newRandom(); + Directory dir = newDirectory(random); long start = System.currentTimeMillis(); - addDocs(dir, ndocs, "foo", "val", maxTF, percentDocs); + addDocs(random, dir, ndocs, "foo", "val", maxTF, percentDocs); long end = System.currentTimeMillis(); if (VERBOSE) System.out.println("milliseconds for creation of " + ndocs + " docs = " + (end-start)); Index: lucene/src/test/org/apache/lucene/index/TestPayloads.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestPayloads.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestPayloads.java (working copy) @@ -101,7 +101,7 @@ // payload bit in the FieldInfo public void testPayloadFieldBit() throws Exception { rnd = newRandom(); - Directory ram = new MockRAMDirectory(); + Directory ram = newDirectory(rnd); PayloadAnalyzer analyzer = new PayloadAnalyzer(); IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(rnd, TEST_VERSION_CURRENT, analyzer)); Document d = new Document(); @@ -153,20 +153,22 @@ assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").storePayloads); assertTrue("Payload field bit should be set.", fi.fieldInfo("f3").storePayloads); reader.close(); + ram.close(); } // Tests if payloads are correctly stored and loaded using both RamDirectory and FSDirectory public void testPayloadsEncoding() throws Exception { rnd = newRandom(); // first perform the test using a RAMDirectory - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(rnd); performTest(rnd, dir); - + dir.close(); // now use a FSDirectory and repeat same test File dirName = _TestUtil.getTempDir("test_payloads"); dir = FSDirectory.open(dirName); performTest(rnd, dir); _TestUtil.rmDir(dirName); + dir.close(); } // builds an index with payloads in the given Directory and performs @@ -489,7 +491,7 @@ final int numDocs = 50 * RANDOM_MULTIPLIER; final ByteArrayPool pool = new ByteArrayPool(numThreads, 5); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(rnd); final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(rnd, TEST_VERSION_CURRENT, new MockAnalyzer())); final String field = "test"; @@ -535,7 +537,7 @@ } } reader.close(); - + dir.close(); assertEquals(pool.size(), numThreads); } Index: lucene/src/test/org/apache/lucene/index/TestLazyBug.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestLazyBug.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestLazyBug.java (working copy) @@ -67,10 +67,10 @@ } }; - private Directory makeIndex() throws RuntimeException { - Directory dir = new MockRAMDirectory(); + private Directory makeIndex() throws Exception { + Random r = newRandom(); + Directory dir = newDirectory(r); try { - Random r = newRandom(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(r, TEST_VERSION_CURRENT, new MockAnalyzer())); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); @@ -120,6 +120,7 @@ } } reader.close(); + dir.close(); } public void testLazyWorks() throws Exception { Index: lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (working copy) @@ -38,8 +38,8 @@ Random random = 
newRandom(); int num=100; - Directory indexA = new MockRAMDirectory(); - Directory indexB = new MockRAMDirectory(); + Directory indexA = newDirectory(random); + Directory indexB = newDirectory(random); fillIndex(random, indexA, 0, num); boolean fail = verifyIndex(indexA, 0); @@ -55,7 +55,7 @@ fail("Index b is invalid"); } - Directory merged = new MockRAMDirectory(); + Directory merged = newDirectory(random); IndexWriter writer = new IndexWriter(merged, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); @@ -68,6 +68,9 @@ merged.close(); assertFalse("The merged index is invalid", fail); + indexA.close(); + indexB.close(); + merged.close(); } private boolean verifyIndex(Directory directory, int startAt) throws IOException Index: lucene/src/test/org/apache/lucene/index/TestParallelReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestParallelReader.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestParallelReader.java (working copy) @@ -40,9 +40,10 @@ public class TestParallelReader extends LuceneTestCase { - private Searcher parallel; - private Searcher single; + private IndexSearcher parallel; + private IndexSearcher single; private Random random; + private Directory dir, dir1, dir2; @Override protected void setUp() throws Exception { @@ -51,6 +52,16 @@ single = single(random); parallel = parallel(random); } + + @Override + protected void tearDown() throws Exception { + single.getIndexReader().close(); + parallel.getIndexReader().close(); + dir.close(); + dir1.close(); + dir2.close(); + super.tearDown(); + } public void testQueries() throws Exception { queryTest(new TermQuery(new Term("f1", "v1"))); @@ -80,6 +91,9 @@ assertTrue(fieldNames.contains("f2")); assertTrue(fieldNames.contains("f3")); assertTrue(fieldNames.contains("f4")); + pr.close(); + dir1.close(); + dir2.close(); } public void testDocument() throws IOException { @@ -101,6 +115,9 @@ assertEquals("v2", doc24.get("f4")); assertEquals("v2", doc223.get("f2")); assertEquals("v2", doc223.get("f3")); + pr.close(); + dir1.close(); + dir2.close(); } public void testIncompatibleIndexes() throws IOException { @@ -108,7 +125,7 @@ Directory dir1 = getDir1(random); // one document only: - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document d3 = new Document(); d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED)); @@ -117,12 +134,17 @@ ParallelReader pr = new ParallelReader(); pr.add(IndexReader.open(dir1, false)); + IndexReader ir = IndexReader.open(dir2, false); try { - pr.add(IndexReader.open(dir2, false)); + pr.add(ir); fail("didn't get exptected exception: indexes don't have same number of documents"); } catch (IllegalArgumentException e) { // expected exception } + pr.close(); + ir.close(); + dir1.close(); + dir2.close(); } public void testIsCurrent() throws IOException { @@ -147,6 +169,9 @@ // now both are not current anymore assertFalse(pr.isCurrent()); + pr.close(); + dir1.close(); + dir2.close(); } public void testIsOptimized() throws IOException { @@ -197,7 +222,8 @@ // now both indexes are optimized assertTrue(pr.isOptimized()); pr.close(); - + dir1.close(); + dir2.close(); } private void queryTest(Query query) throws IOException { @@ -216,8 +242,8 @@ } // Fields 1-4 indexed 
together: - private Searcher single(Random random) throws IOException { - Directory dir = new MockRAMDirectory(); + private IndexSearcher single(Random random) throws IOException { + dir = newDirectory(random); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document d1 = new Document(); d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED)); @@ -237,9 +263,9 @@ } // Fields 1 & 2 in one index, 3 & 4 in other, with ParallelReader: - private Searcher parallel(Random random) throws IOException { - Directory dir1 = getDir1(random); - Directory dir2 = getDir2(random); + private IndexSearcher parallel(Random random) throws IOException { + dir1 = getDir1(random); + dir2 = getDir2(random); ParallelReader pr = new ParallelReader(); pr.add(IndexReader.open(dir1, false)); pr.add(IndexReader.open(dir2, false)); @@ -247,7 +273,7 @@ } private Directory getDir1(Random random) throws IOException { - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document d1 = new Document(); d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED)); @@ -262,7 +288,7 @@ } private Directory getDir2(Random random) throws IOException { - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document d3 = new Document(); d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED)); Index: lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java =================================================================== --- lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java (working copy) @@ -25,7 +25,7 @@ public class TestIntBlockCodec extends LuceneTestCase { public void testSimpleIntBlocks() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(newRandom()); IntStreamFactory f = new MockFixedIntBlockCodec(128).getIntFactory(); @@ -47,7 +47,7 @@ } public void testEmptySimpleIntBlocks() throws Exception { - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(newRandom()); IntStreamFactory f = new MockFixedIntBlockCodec(128).getIntFactory(); IntIndexOutput out = f.createOutput(dir, "test"); Index: lucene/src/test/org/apache/lucene/index/codecs/preflex/TestSurrogates.java =================================================================== --- lucene/src/test/org/apache/lucene/index/codecs/preflex/TestSurrogates.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/codecs/preflex/TestSurrogates.java (working copy) @@ -274,7 +274,7 @@ public void testSurrogatesOrder() throws Exception { Random r = newRandom(); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(r); RandomIndexWriter w = new RandomIndexWriter(r, dir, newIndexWriterConfig(r, TEST_VERSION_CURRENT, @@ -338,5 +338,7 @@ doTestSeekDoesNotExist(r, numField, fieldTerms, fieldTermsArray, reader); reader.close(); + w.close(); + dir.close(); } } Index: lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (revision 984759) +++ 
lucene/src/test/org/apache/lucene/index/TestDeletionPolicy.java (working copy) @@ -202,7 +202,7 @@ boolean useCompoundFile = true; Random random = newRandom(); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS); IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) @@ -282,7 +282,7 @@ // Never deletes a commit KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); policy.dir = dir; IndexWriterConfig conf = newIndexWriterConfig(random, @@ -365,7 +365,7 @@ // Never deletes a commit KeepAllDeletionPolicy policy = new KeepAllDeletionPolicy(); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); policy.dir = dir; IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, @@ -474,7 +474,7 @@ KeepNoneOnInitDeletionPolicy policy = new KeepNoneOnInitDeletionPolicy(); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) @@ -523,7 +523,7 @@ boolean useCompoundFile = (pass % 2) != 0; - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N); @@ -588,7 +588,7 @@ KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy); @@ -697,7 +697,7 @@ KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N); - Directory dir = new MockRAMDirectory(); + Directory dir = newDirectory(random); IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy) Index: lucene/src/test/org/apache/lucene/index/TestSegmentReader.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestSegmentReader.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestSegmentReader.java (working copy) @@ -31,7 +31,7 @@ import org.apache.lucene.store.MockRAMDirectory; public class TestSegmentReader extends LuceneTestCase { - private MockRAMDirectory dir = new MockRAMDirectory(); + private MockRAMDirectory dir; private Document testDoc = new Document(); private SegmentReader reader = null; @@ -43,10 +43,18 @@ @Override protected void setUp() throws Exception { super.setUp(); + dir = newDirectory(newRandom()); DocHelper.setupDoc(testDoc); SegmentInfo info = DocHelper.writeDoc(dir, testDoc); reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR); } + + @Override + protected void tearDown() throws Exception { + reader.close(); + dir.close(); + super.tearDown(); + } public void test() { assertTrue(dir != null); @@ -81,6 +89,7 @@ assertTrue(deleteReader.isDeleted(0) == true); assertTrue(deleteReader.hasDeletions() == true); assertTrue(deleteReader.numDocs() == 0); + deleteReader.close(); } public void testGetFieldNameVariations() { Index: lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java =================================================================== --- 
lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java (working copy) @@ -48,13 +48,13 @@ */ public void testEmptyIndex() throws IOException { Random random = newRandom(); - MockRAMDirectory rd1 = new MockRAMDirectory(); + MockRAMDirectory rd1 = newDirectory(random); IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); iw.close(); - MockRAMDirectory rd2 = new MockRAMDirectory(rd1); + MockRAMDirectory rd2 = newDirectory(random, rd1); - MockRAMDirectory rdOut = new MockRAMDirectory(); + MockRAMDirectory rdOut = newDirectory(random); IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); ParallelReader pr = new ParallelReader(); @@ -78,8 +78,8 @@ * any exception. */ public void testEmptyIndexWithVectors() throws IOException { - MockRAMDirectory rd1 = new MockRAMDirectory(); Random random = newRandom(); + MockRAMDirectory rd1 = newDirectory(random); { IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); @@ -100,7 +100,7 @@ iw.close(); } - MockRAMDirectory rd2 = new MockRAMDirectory(); + MockRAMDirectory rd2 = newDirectory(random); { IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); @@ -108,7 +108,7 @@ iw.close(); } - MockRAMDirectory rdOut = new MockRAMDirectory(); + MockRAMDirectory rdOut = newDirectory(random); IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); ParallelReader pr = new ParallelReader(); Index: lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (working copy) @@ -226,7 +226,7 @@ String fullPath = fullDir(name); Directory dir = FSDirectory.open(new File(fullPath)); - Directory targetDir = new MockRAMDirectory(); + Directory targetDir = newDirectory(random); IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); w.addIndexes(new Directory[] { dir }); @@ -235,6 +235,7 @@ _TestUtil.checkIndex(targetDir); dir.close(); + targetDir.close(); rmDir(name); } } @@ -247,7 +248,7 @@ Directory dir = FSDirectory.open(new File(fullPath)); IndexReader reader = IndexReader.open(dir); - Directory targetDir = new MockRAMDirectory(); + Directory targetDir = newDirectory(random); IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); w.addIndexes(new IndexReader[] { reader }); @@ -257,6 +258,7 @@ _TestUtil.checkIndex(targetDir); dir.close(); + targetDir.close(); rmDir(name); } } Index: lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (working copy) @@ -120,8 +120,9 @@ } public void testSeek() throws IOException { - Directory directory = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(directory, 
newIndexWriterConfig(newRandom(), TEST_VERSION_CURRENT, new MockAnalyzer())); + Random random = newRandom(); + Directory directory = newDirectory(random); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i = 0; i < 10; i++) { Document doc = new Document(); doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED)); @@ -152,8 +153,9 @@ assertEquals(tp.docID(), i); assertEquals(tp.nextPosition(), 0); } + reader.close(); + directory.close(); - } Index: lucene/src/test/org/apache/lucene/index/TestNorms.java =================================================================== --- lucene/src/test/org/apache/lucene/index/TestNorms.java (revision 984759) +++ lucene/src/test/org/apache/lucene/index/TestNorms.java (working copy) @@ -76,7 +76,7 @@ */ public void testNorms() throws IOException { Random random = newRandom(); - Directory dir1 = new MockRAMDirectory(); + Directory dir1 = newDirectory(random); norms = new ArrayList(); modifiedNorms = new ArrayList(); @@ -93,13 +93,13 @@ modifiedNorms = new ArrayList(); numDocNorms = 0; - Directory dir2 = new MockRAMDirectory(); + Directory dir2 = newDirectory(random); createIndex(random, dir2); doTestNorms(random, dir2); // add index1 and index2 to a third index: index3 - Directory dir3 = new MockRAMDirectory(); + Directory dir3 = newDirectory(random); createIndex(random, dir3); IndexWriter iw = new IndexWriter(dir3, newIndexWriterConfig(random, Index: lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java =================================================================== --- lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java (revision 984759) +++ lucene/src/test/org/apache/lucene/TestSearchForDuplicates.java (working copy) @@ -20,6 +20,7 @@ import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; +import java.util.Random; import org.apache.lucene.store.*; import org.apache.lucene.document.*; @@ -58,7 +59,8 @@ public void testRun() throws Exception { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw, true); - doTest(pw, false); + Random random = newRandom(); + doTest(random, pw, false); pw.close(); sw.close(); String multiFileOutput = sw.getBuffer().toString(); @@ -66,7 +68,7 @@ sw = new StringWriter(); pw = new PrintWriter(sw, true); - doTest(pw, true); + doTest(random, pw, true); pw.close(); sw.close(); String singleFileOutput = sw.getBuffer().toString(); @@ -75,10 +77,10 @@ } - private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception { - Directory directory = new MockRAMDirectory(); + private void doTest(Random random, PrintWriter out, boolean useCompoundFiles) throws Exception { + Directory directory = newDirectory(random); Analyzer analyzer = new MockAnalyzer(); - IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer); LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy(); lmp.setUseCompoundFile(useCompoundFiles); lmp.setUseCompoundDocStore(useCompoundFiles); @@ -125,6 +127,7 @@ checkHits(hits, MAX_DOCS, searcher); searcher.close(); + directory.close(); } Index: lucene/src/test/org/apache/lucene/store/MockRAMDirectory.java =================================================================== --- lucene/src/test/org/apache/lucene/store/MockRAMDirectory.java (revision 984759) +++ lucene/src/test/org/apache/lucene/store/MockRAMDirectory.java 
(working copy)
@@ -281,8 +281,15 @@
       // super() does not throw IOException currently:
       throw new RuntimeException("MockRAMDirectory: cannot close: there are still open files: " + openFiles);
     }
+    open = false;
   }
+
+  boolean open = true;
+
+  public synchronized boolean isOpen() {
+    return open;
+  }
+
   /**
    * Objects that represent fail-able conditions. Objects of a derived
    * class are created and registered with the mock directory. After
Index: lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java
===================================================================
--- lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java	(revision 984759)
+++ lucene/src/test/org/apache/lucene/util/LuceneTestCaseJ4.java	(working copy)
@@ -26,6 +26,8 @@
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.FieldCache.CacheEntry;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.index.codecs.Codec;
@@ -56,6 +58,7 @@
 import java.io.PrintStream;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.Locale;
 import java.util.Random;
@@ -187,6 +190,8 @@
   private static TimeZone timeZone;
   private static TimeZone savedTimeZone;
 
+  private static Map<MockRAMDirectory,StackTraceElement[]> stores;
+
   private static final String[] TEST_CODECS = new String[] {"MockSep", "MockFixedIntBlock", "MockVariableIntBlock"};
 
   private static void swapCodec(Codec c) {
@@ -274,6 +279,7 @@
 
   @BeforeClass
   public static void beforeClassLuceneTestCaseJ4() {
+    stores = Collections.synchronizedMap(new IdentityHashMap<MockRAMDirectory,StackTraceElement[]>());
     codec = installTestCodecs();
     savedLocale = Locale.getDefault();
     locale = TEST_LOCALE.equals("random") ? randomLocale(seedRnd) : localeForName(TEST_LOCALE);
@@ -288,6 +294,15 @@
     removeTestCodecs(codec);
     Locale.setDefault(savedLocale);
     TimeZone.setDefault(savedTimeZone);
+    // now look for unclosed resources
+    for (MockRAMDirectory d : stores.keySet()) {
+      if (d.isOpen()) {
+        StackTraceElement elements[] = stores.get(d);
+        StackTraceElement element = (elements.length > 1) ? elements[1] : null;
+        fail("directory of test was not closed, opened from: " + element);
+      }
+    }
+    stores = null;
   }
 
   // This is how we get control when errors occur.
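[Editor's sketch, not part of the patch: the hunks above only enforce a usage pattern; the test class below is hypothetical and just illustrates it. Every directory obtained from newDirectory(random) is registered in the stores map together with its opening stack trace, and must be closed before the test finishes, otherwise the check above fails and reports the frame that opened it.]

// Illustrative sketch only -- not from this patch; class and test names are made up.
import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

public class TestTrackedDirectoryUsage extends LuceneTestCase {
  public void testDirectoryIsClosed() throws Exception {
    Random random = newRandom();
    // registered in 'stores' together with the stack trace of this call
    MockRAMDirectory dir = newDirectory(random);
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random,
        TEST_VERSION_CURRENT, new MockAnalyzer()));
    w.addDocument(new Document());
    w.close();
    // without this, isOpen() stays true and tearDown()/afterClass fails,
    // pointing at the newDirectory(random) call above
    dir.close();
  }
}

[End of sketch; the patch continues below.]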
@@ -567,6 +582,20 @@
     return c;
   }
 
+  public static MockRAMDirectory newDirectory(Random r) throws IOException {
+    StackTraceElement[] stack = new Exception().getStackTrace();
+    MockRAMDirectory dir = new MockRAMDirectory();
+    stores.put(dir, stack);
+    return dir;
+  }
+
+  public static MockRAMDirectory newDirectory(Random r, Directory d) throws IOException {
+    StackTraceElement[] stack = new Exception().getStackTrace();
+    MockRAMDirectory dir = new MockRAMDirectory(d);
+    stores.put(dir, stack);
+    return dir;
+  }
+
   /** return a random Locale from the available locales on the system */
   public static Locale randomLocale(Random random) {
     Locale locales[] = Locale.getAvailableLocales();
Index: lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java
===================================================================
--- lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java	(revision 984759)
+++ lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java	(working copy)
@@ -56,7 +56,7 @@
     long ceil = 2;
     for(int nbits=1;nbits<63;nbits++) {
       final int valueCount = 100+rnd.nextInt(500);
-      final Directory d = new MockRAMDirectory();
+      final Directory d = newDirectory(rnd);
       IndexOutput out = d.createOutput("out.bin");
       PackedInts.Writer w = PackedInts.getWriter(
@@ -95,6 +95,7 @@
       assertEquals(fp, in.getFilePointer());
       in.close();
       ceil *= 2;
+      d.close();
     }
   }
 }
@@ -208,7 +209,7 @@
   }
 
   public void testSingleValue() throws Exception {
-    Directory dir = new MockRAMDirectory();
+    Directory dir = newDirectory(newRandom());
     IndexOutput out = dir.createOutput("out");
     PackedInts.Writer w = PackedInts.getWriter(out, 1, 8);
     w.add(17);
Index: lucene/src/test/org/apache/lucene/util/LuceneTestCase.java
===================================================================
--- lucene/src/test/org/apache/lucene/util/LuceneTestCase.java	(revision 984759)
+++ lucene/src/test/org/apache/lucene/util/LuceneTestCase.java	(working copy)
@@ -21,8 +21,10 @@
 import java.io.PrintStream;
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Random;
 import java.util.ArrayList;
 import java.util.List;
@@ -39,6 +41,8 @@
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.FieldCache;
 import org.apache.lucene.search.FieldCache.CacheEntry;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
 
 /**
@@ -97,7 +101,9 @@
   private Locale savedLocale;
   private TimeZone timeZone;
   private TimeZone savedTimeZone;
-  
+
+  private Map<MockRAMDirectory,StackTraceElement[]> stores;
+
   /** Used to track if setUp and tearDown are called correctly from subclasses */
   private boolean setup;
 
@@ -125,6 +131,7 @@
     super.setUp();
     assertFalse("ensure your tearDown() calls super.tearDown()!!!", setup);
     setup = true;
+    stores = new IdentityHashMap<MockRAMDirectory,StackTraceElement[]>();
     savedUncaughtExceptionHandler = Thread.getDefaultUncaughtExceptionHandler();
     Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
       public void uncaughtException(Thread t, Throwable e) {
@@ -203,6 +210,15 @@
       purgeFieldCache(FieldCache.DEFAULT);
     }
 
+    // now look for unclosed resources
+    for (MockRAMDirectory d : stores.keySet()) {
+      if (d.isOpen()) {
+        StackTraceElement elements[] = stores.get(d);
+        StackTraceElement element = (elements.length > 1) ?
elements[1] : null; + fail("directory of testcase " + getName() + " was not closed, opened from: " + element); + } + } + stores = null; super.tearDown(); } @@ -313,6 +329,20 @@ return LuceneTestCaseJ4.newIndexWriterConfig(r, v, a); } + public MockRAMDirectory newDirectory(Random r) throws IOException { + StackTraceElement[] stack = new Exception().getStackTrace(); + MockRAMDirectory dir = new MockRAMDirectory(); + stores.put(dir, stack); + return dir; + } + + public MockRAMDirectory newDirectory(Random r, Directory d) throws IOException { + StackTraceElement[] stack = new Exception().getStackTrace(); + MockRAMDirectory dir = new MockRAMDirectory(d); + stores.put(dir, stack); + return dir; + } + /** Gets a resource from the classpath as {@link File}. This method should only be used, * if a real file is needed. To get a stream, code should prefer * {@link Class#getResourceAsStream} using {@code this.getClass()}. Index: lucene/src/test/org/apache/lucene/util/TestFieldCacheSanityChecker.java =================================================================== --- lucene/src/test/org/apache/lucene/util/TestFieldCacheSanityChecker.java (revision 984759) +++ lucene/src/test/org/apache/lucene/util/TestFieldCacheSanityChecker.java (working copy) @@ -29,25 +29,26 @@ import org.apache.lucene.util.FieldCacheSanityChecker.InsanityType; import java.io.IOException; +import java.util.Random; public class TestFieldCacheSanityChecker extends LuceneTestCase { protected IndexReader readerA; protected IndexReader readerB; protected IndexReader readerX; - + protected MockRAMDirectory dirA, dirB; private static final int NUM_DOCS = 1000; @Override protected void setUp() throws Exception { super.setUp(); + Random random = newRandom(); + dirA = newDirectory(random); + dirB = newDirectory(random); - MockRAMDirectory dirA = new MockRAMDirectory(); - MockRAMDirectory dirB = new MockRAMDirectory(); + IndexWriter wA = new IndexWriter(dirA, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter wB = new IndexWriter(dirB, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); - IndexWriter wA = new IndexWriter(dirA, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); - IndexWriter wB = new IndexWriter(dirB, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); - long theLong = Long.MAX_VALUE; double theDouble = Double.MAX_VALUE; byte theByte = Byte.MAX_VALUE; @@ -80,6 +81,8 @@ readerA.close(); readerB.close(); readerX.close(); + dirA.close(); + dirB.close(); super.tearDown(); } Index: lucene/src/test/org/apache/lucene/document/TestBinaryDocument.java =================================================================== --- lucene/src/test/org/apache/lucene/document/TestBinaryDocument.java (revision 984759) +++ lucene/src/test/org/apache/lucene/document/TestBinaryDocument.java (working copy) @@ -1,5 +1,7 @@ package org.apache.lucene.document; +import java.util.Random; + import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.index.IndexReader; @@ -55,8 +57,9 @@ assertEquals(2, doc.fields.size()); /** add the doc to a ram index */ - MockRAMDirectory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); writer.addDocument(doc); /** open a reader and fetch the document */ @@ -94,8 +97,9 @@ doc.add(stringFldCompressed); /** add the doc to a ram index */ 
- MockRAMDirectory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); writer.addDocument(doc); /** open a reader and fetch the document */ Index: lucene/src/test/org/apache/lucene/document/TestDocument.java =================================================================== --- lucene/src/test/org/apache/lucene/document/TestDocument.java (revision 984759) +++ lucene/src/test/org/apache/lucene/document/TestDocument.java (working copy) @@ -1,5 +1,7 @@ package org.apache.lucene.document; +import java.util.Random; + import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; @@ -152,8 +154,9 @@ * @throws Exception on error */ public void testGetValuesForIndexedDocument() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); writer.addDocument(makeDocumentWithFields()); IndexReader reader = writer.getReader(); @@ -230,8 +233,9 @@ doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED)); - MockRAMDirectory dir = new MockRAMDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(newRandom(), dir); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + RandomIndexWriter writer = new RandomIndexWriter(random, dir); writer.addDocument(doc); field.setValue("id2"); writer.addDocument(doc); Index: lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java =================================================================== --- lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java (revision 984759) +++ lucene/src/java/org/apache/lucene/index/PersistentSnapshotDeletionPolicy.java (working copy) @@ -99,6 +99,7 @@ Document doc = r.document(r.maxDoc() - 1); Field sid = doc.getField(SNAPSHOTS_ID); if (sid == null) { + writer.close(); throw new IllegalStateException("directory is not a valid snapshots store!"); } doc.removeField(SNAPSHOTS_ID); @@ -106,6 +107,7 @@ registerSnapshotInfo(f.name(), f.stringValue(), null); } } else if (numDocs != 0) { + writer.close(); throw new IllegalStateException( "should be at most 1 document in the snapshots directory: " + numDocs); } Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java =================================================================== --- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (revision 984759) +++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (working copy) @@ -20,6 +20,7 @@ import java.io.Reader; import java.util.HashMap; import java.util.Map; +import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; @@ -319,8 +320,9 @@ public void testStopWordSearching() throws Exception { Analyzer analyzer = new MockAnalyzer(); - Directory ramDir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + Random random = newRandom(); + Directory ramDir = newDirectory(random); + IndexWriter iw = 
new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer)); Document doc = new Document(); doc.add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED)); @@ -337,6 +339,7 @@ ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs; assertEquals(1, hits.length); is.close(); + ramDir.close(); } /** Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java =================================================================== --- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (revision 984759) +++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (working copy) @@ -30,6 +30,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -653,9 +654,9 @@ } public void testFarsiRangeCollating() throws Exception { - - MockRAMDirectory ramDir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + Random random = newRandom(); + MockRAMDirectory ramDir = newDirectory(random); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); Document doc = new Document(); doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES, Field.Index.NOT_ANALYZED)); @@ -699,6 +700,7 @@ assertEquals("The index Term should be included.", 1, result.length); is.close(); + ramDir.close(); } /** for testing legacy DateField support */ @@ -1076,9 +1078,9 @@ } public void testLocalDateFormat() throws IOException, QueryNodeException { - - MockRAMDirectory ramDir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + Random random = newRandom(); + MockRAMDirectory ramDir = newDirectory(random); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw); addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw); iw.close(); @@ -1090,6 +1092,7 @@ assertHits(1, "{12/1/2005 TO 12/4/2005}", is); assertHits(0, "{12/3/2005 TO 12/4/2005}", is); is.close(); + ramDir.close(); } public void testStarParsing() throws Exception { @@ -1275,8 +1278,9 @@ } public void testMultiPhraseQuery() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new CannedAnalyzer())); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new CannedAnalyzer())); Document doc = new Document(); doc.add(new Field("field", "", Field.Store.NO, Field.Index.ANALYZED)); w.addDocument(doc); @@ -1288,6 +1292,7 @@ assertEquals(1, s.search(q, 10).totalHits); r.close(); w.close(); + dir.close(); } } Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQueryParserWrapper.java =================================================================== --- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQueryParserWrapper.java (revision 984759) +++ 
lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQueryParserWrapper.java (working copy) @@ -321,7 +321,7 @@ public void testStopWordSearching() throws Exception { Analyzer analyzer = new MockAnalyzer(); - Directory ramDir = new MockRAMDirectory(); + Directory ramDir = newDirectory(newRandom()); IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED); Document doc = new Document(); @@ -338,6 +338,7 @@ ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs; assertEquals(1, hits.length); is.close(); + ramDir.close(); } /** Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQueryParserWrapper.java =================================================================== --- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQueryParserWrapper.java (revision 984759) +++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQueryParserWrapper.java (working copy) @@ -654,7 +654,7 @@ public void testFarsiRangeCollating() throws Exception { - MockRAMDirectory ramDir = new MockRAMDirectory(); + MockRAMDirectory ramDir = newDirectory(newRandom()); IndexWriter iw = new IndexWriter(ramDir, new MockAnalyzer(MockTokenizer.WHITESPACE, false), true, IndexWriter.MaxFieldLength.LIMITED); Document doc = new Document(); @@ -696,6 +696,7 @@ assertEquals("The index Term should be included.", 1, result.length); is.close(); + ramDir.close(); } private String escapeDateString(String s) { @@ -1062,7 +1063,7 @@ public void testLocalDateFormat() throws IOException, ParseException { - MockRAMDirectory ramDir = new MockRAMDirectory(); + MockRAMDirectory ramDir = newDirectory(newRandom()); IndexWriter iw = new IndexWriter(ramDir, new MockAnalyzer(MockTokenizer.WHITESPACE, false), true, IndexWriter.MaxFieldLength.LIMITED); addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw); @@ -1076,6 +1077,7 @@ assertHits(1, "{12/1/2005 TO 12/4/2005}", is); assertHits(0, "{12/3/2005 TO 12/4/2005}", is); is.close(); + ramDir.close(); } public void testStarParsing() throws Exception { Index: lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java =================================================================== --- lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (revision 984759) +++ lucene/contrib/queryparser/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (working copy) @@ -18,6 +18,7 @@ */ import java.util.HashSet; +import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -34,7 +35,7 @@ import org.apache.lucene.util.LuceneTestCase; public class TestComplexPhraseQuery extends LuceneTestCase { - + MockRAMDirectory rd; Analyzer analyzer = new MockAnalyzer(); DocData docsContent[] = { new DocData("john smith", "1"), @@ -112,8 +113,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - MockRAMDirectory rd = new MockRAMDirectory(); - IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + Random random = newRandom(); + rd = newDirectory(random); + IndexWriter w = new IndexWriter(rd, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer)); for (int i = 0; i < docsContent.length; i++) { Document doc = new Document(); doc.add(new Field("name", docsContent[i].name, Field.Store.YES, @@ -129,6 +131,7 @@ @Override protected void 
tearDown() throws Exception { searcher.close(); + rd.close(); super.tearDown(); } Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java =================================================================== --- lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (revision 984759) +++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (working copy) @@ -18,6 +18,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.index.IndexReader; @@ -60,9 +61,9 @@ ii.close(); // make sure a Directory acts the same - - Directory d = new MockRAMDirectory(); - new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close(); + Random random = newRandom(); + Directory d = newDirectory(random); + new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())).close(); r = IndexReader.open(d, false); testNorms(r); r.close(); @@ -93,9 +94,9 @@ ii.close(); // make sure a Directory acts the same - - Directory d = new MockRAMDirectory(); - new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close(); + Random random = newRandom(); + Directory d = newDirectory(random); + new IndexWriter(d, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())).close(); r = IndexReader.open(d, false); termsEnumTest(r); r.close(); Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java =================================================================== --- lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (revision 984759) +++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (working copy) @@ -21,6 +21,7 @@ import java.util.Comparator; import java.util.Iterator; import java.util.List; +import java.util.Random; import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.TokenStream; @@ -61,10 +62,11 @@ public void testLoadIndexReader() throws Exception { - MockRAMDirectory dir = new MockRAMDirectory(); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); // create dir data - IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i = 0; i < 20; i++) { Document document = new Document(); @@ -79,17 +81,18 @@ ir.close(); testEqualBehaviour(dir, ii); + dir.close(); } public void testInstantiatedIndexWriter() throws Exception { - - MockRAMDirectory dir = new MockRAMDirectory(); + Random random = newRandom(); + MockRAMDirectory dir = newDirectory(random); InstantiatedIndex ii = new InstantiatedIndex(); // create dir data - IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig( + IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i = 0; i < 500; i++) { Document document = new Document(); @@ -110,8 +113,8 @@ testEqualBehaviour(dir, ii); + dir.close(); - } Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestSerialization.java =================================================================== --- 
lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestSerialization.java (revision 984759) +++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestSerialization.java (working copy) @@ -17,26 +17,25 @@ */ -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import java.io.ByteArrayOutputStream; import java.io.ObjectOutputStream; +import java.util.Random; public class TestSerialization extends LuceneTestCase { public void test() throws Exception { + Random random = newRandom(); + Directory dir = newDirectory(random); - Directory dir = new MockRAMDirectory(); - - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("foo", "bar rab abr bra rba", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); doc.add(new Field("moo", "bar rab abr bra rba", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)); @@ -52,7 +51,7 @@ oos.writeObject(ii); oos.close(); baos.close(); - + dir.close(); } Index: lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java =================================================================== --- lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (revision 984759) +++ lucene/contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (working copy) @@ -16,6 +16,8 @@ */ import java.io.IOException; +import java.util.Random; + import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -33,7 +35,8 @@ public class TestUnoptimizedReaderOnConstructor extends LuceneTestCase { public void test() throws Exception { - Directory dir = new MockRAMDirectory(); + Random random = newRandom(); + Directory dir = newDirectory(random); IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); addDocument(iw, "Hello, world!"); addDocument(iw, "All work and no play makes jack a dull boy"); @@ -59,7 +62,8 @@ } // todo some assertations. - + unoptimizedReader.close(); + dir.close(); } private void addDocument(IndexWriter iw, String text) throws IOException { Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (revision 984759) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (working copy) @@ -16,6 +16,8 @@ * limitations under the License. 
*/ +import java.util.Random; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; @@ -27,12 +29,15 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase { IndexReader input; int NUM_DOCS = 11; - + private Random random; + MockRAMDirectory dir; + @Override protected void setUp() throws Exception { super.setUp(); - MockRAMDirectory dir = new MockRAMDirectory(); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + random = newRandom(); + dir = newDirectory(random); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc; for (int i = 0; i < NUM_DOCS; i++) { doc = new Document(); @@ -44,18 +49,27 @@ input = IndexReader.open(dir, false); // delete the last doc input.deleteDocument(input.maxDoc() - 1); + IndexReader inputOld = input; input = input.reopen(true); + inputOld.close(); } + @Override + protected void tearDown() throws Exception { + input.close(); + dir.close(); + super.tearDown(); + } + /** * Test round-robin splitting. */ public void testSplitRR() throws Exception { MultiPassIndexSplitter splitter = new MultiPassIndexSplitter(); Directory[] dirs = new Directory[]{ - new MockRAMDirectory(), - new MockRAMDirectory(), - new MockRAMDirectory() + newDirectory(random), + newDirectory(random), + newDirectory(random) }; splitter.split(input, dirs, false); IndexReader ir; @@ -86,7 +100,10 @@ assertNotSame("1", te.term()); assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef("0"))); - assertNotSame("0", te.term().utf8ToString()); + assertNotSame("0", te.term().utf8ToString()); + ir.close(); + for (Directory d : dirs) + d.close(); } /** @@ -95,9 +112,9 @@ public void testSplitSeq() throws Exception { MultiPassIndexSplitter splitter = new MultiPassIndexSplitter(); Directory[] dirs = new Directory[]{ - new MockRAMDirectory(), - new MockRAMDirectory(), - new MockRAMDirectory() + newDirectory(random), + newDirectory(random), + newDirectory(random) }; splitter.split(input, dirs, true); IndexReader ir; @@ -122,5 +139,8 @@ Term t = new Term("id", (NUM_DOCS - 1) + ""); assertEquals(TermsEnum.SeekStatus.NOT_FOUND, te.seek(new BytesRef(t.text()))); assertNotSame(t.text(), te.term().utf8ToString()); + ir.close(); + for (Directory d : dirs) + d.close(); } } Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (revision 984759) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (working copy) @@ -8,6 +8,7 @@ import org.apache.lucene.util.LuceneTestCase; import java.util.Collections; +import java.util.Random; /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,10 +27,10 @@ public class TestTermVectorAccessor extends LuceneTestCase { public void test() throws Exception { + Random random = newRandom(); + Directory dir = newDirectory(random); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); - Directory dir = new MockRAMDirectory(); - IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); - Document doc; doc = new Document(); Index: lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (revision 984759) +++ lucene/contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (working copy) @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -44,7 +45,7 @@ public static int NUM_DOCS = 5; - public Directory store = new MockRAMDirectory(); + public Directory store; /** inverts the normal notion of lengthNorm */ public static Similarity s = new DefaultSimilarity() { @@ -57,7 +58,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - IndexWriter writer = new IndexWriter(store, new IndexWriterConfig( + Random random = newRandom(); + store = newDirectory(random); + IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i = 0; i < NUM_DOCS; i++) { @@ -75,6 +78,12 @@ writer.close(); } + @Override + protected void tearDown() throws Exception { + store.close(); + super.tearDown(); + } + public void testMissingField() throws Exception { FieldNormModifier fnm = new FieldNormModifier(store, s); fnm.reSetNorms("nobodyherebutuschickens"); Index: lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (revision 984759) +++ lucene/contrib/misc/src/test/org/apache/lucene/misc/TestHighFreqTerms.java (working copy) @@ -17,6 +17,8 @@ * limitations under the License. 
*/ +import java.util.Random; + import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; @@ -36,17 +38,19 @@ public void setUp() throws Exception { super.setUp(); - dir= new MockRAMDirectory(); - writer = new IndexWriter(dir, new IndexWriterConfig( + Random random = newRandom(); + dir= newDirectory(random); + writer = new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)) .setMaxBufferedDocs(2)); indexDocs(writer); reader = IndexReader.open(dir, true); } - public void tearDown()throws Exception{ + public void tearDown() throws Exception{ + reader.close(); + dir.close(); super.tearDown(); - reader.close(); } /******************** Tests for getHighFreqTerms **********************************/ Index: lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java =================================================================== --- lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (revision 984759) +++ lucene/contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (working copy) @@ -18,6 +18,7 @@ */ import java.io.IOException; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -48,7 +49,7 @@ public static int NUM_DOCS = 5; - public Directory store = new MockRAMDirectory(); + public Directory store; /** inverts the normal notion of lengthNorm */ public static Similarity s = new DefaultSimilarity() { @@ -61,7 +62,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - IndexWriter writer = new IndexWriter(store, new IndexWriterConfig( + Random random = newRandom(); + store = newDirectory(random); + IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); for (int i = 0; i < NUM_DOCS; i++) { @@ -82,6 +85,12 @@ writer.close(); } + @Override + protected void tearDown() throws Exception { + store.close(); + super.tearDown(); + } + public void testMissingField() { FieldNormModifier fnm = new FieldNormModifier(store, s); try { Index: lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteCachingWrapperFilter.java =================================================================== --- lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteCachingWrapperFilter.java (revision 984759) +++ lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteCachingWrapperFilter.java (working copy) @@ -17,13 +17,15 @@ * limitations under the License. */ +import java.util.Random; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.store.MockRAMDirectory; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -33,11 +35,15 @@ * Tests that the index is cached on the searcher side of things. 
*/ public class TestRemoteCachingWrapperFilter extends RemoteTestCaseJ4 { + private static MockRAMDirectory indexStore; + private static Searchable local; + @BeforeClass public static void beforeClass() throws Exception { // construct an index - MockRAMDirectory indexStore = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig( + Random random = newStaticRandom(TestRemoteCachingWrapperFilter.class); + indexStore = newDirectory(random); + IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED)); @@ -52,10 +58,17 @@ writer.addDocument(doc); writer.optimize(); writer.close(); - Searchable local = new IndexSearcher(indexStore, true); + local = new IndexSearcher(indexStore, true); startServer(local); } + @AfterClass + public static void afterClass() throws Exception { + local.close(); + indexStore.close(); + indexStore = null; + } + private static void search(Query query, Filter filter, int hitNumber, String typeValue) throws Exception { Searchable[] searchables = { lookupRemote() }; Searcher searcher = new MultiSearcher(searchables); Index: lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteSort.java =================================================================== --- lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteSort.java (revision 984759) +++ lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteSort.java (working copy) @@ -35,6 +35,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.BytesRef; +import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -48,6 +49,7 @@ public class TestRemoteSort extends RemoteTestCaseJ4 { private static IndexSearcher full; + private static MockRAMDirectory indexStore; private Query queryX; private Query queryY; private Query queryA; @@ -82,8 +84,9 @@ // create an index of all the documents, or just the x, or just the y documents @BeforeClass public static void beforeClass() throws Exception { - MockRAMDirectory indexStore = new MockRAMDirectory (); - IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig( + Random random = newStaticRandom(TestRemoteSort.class); + indexStore = newDirectory(random); + IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer()) .setMaxBufferedDocs(2)); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(1000); @@ -111,6 +114,14 @@ startServer(full); } + @AfterClass + public static void afterClass() throws Exception { + full.close(); + full = null; + indexStore.close(); + indexStore = null; + } + public String getRandomNumberString(int num, int low, int high) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < num; i++) { Index: lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteSearchable.java =================================================================== --- lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteSearchable.java (revision 984759) +++ lucene/contrib/remote/src/test/org/apache/lucene/search/TestRemoteSearchable.java (working copy) @@ -20,25 +20,29 @@ import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.*; import org.apache.lucene.index.IndexWriter; -import 
org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.store.MockRAMDirectory; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; import java.util.Collections; +import java.util.Random; import java.util.Set; import java.util.HashSet; public class TestRemoteSearchable extends RemoteTestCaseJ4 { - + private static MockRAMDirectory indexStore; + private static Searchable local; + @BeforeClass public static void beforeClass() throws Exception { // construct an index - MockRAMDirectory indexStore = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig( + Random random = newStaticRandom(TestRemoteSearchable.class); + indexStore = newDirectory(random); + IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); Document doc = new Document(); doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED)); @@ -46,10 +50,17 @@ writer.addDocument(doc); writer.optimize(); writer.close(); - Searchable local = new IndexSearcher(indexStore, true); + local = new IndexSearcher(indexStore, true); startServer(local); } - + + @AfterClass + public static void afterClass() throws Exception { + local.close(); + indexStore.close(); + indexStore = null; + } + private static void search(Query query) throws Exception { // try to search the published index Searchable[] searchables = { lookupRemote() }; Index: lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/builders/TestNumericRangeFilterBuilder.java =================================================================== --- lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/builders/TestNumericRangeFilterBuilder.java (revision 984759) +++ lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/builders/TestNumericRangeFilterBuilder.java (working copy) @@ -20,6 +20,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; +import java.util.Random; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; @@ -61,9 +62,9 @@ String xml = ""; Document doc = getDocumentFromString(xml); Filter filter = filterBuilder.getFilter(doc.getDocumentElement()); - - MockRAMDirectory ramDir = new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)); + Random random = newRandom(); + MockRAMDirectory ramDir = newDirectory(random); + IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, null)); writer.commit(); try { @@ -81,6 +82,7 @@ { writer.commit(); writer.close(); + ramDir.close(); } } Index: lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java =================================================================== --- lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (revision 984759) +++ lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (working copy) @@ -4,6 +4,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.util.Random; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; @@ -58,12 +59,10 @@ //initialize the parser builder=new CorePlusExtensionsParser("contents",analyzer); - //initialize the index (done once, then cached in 
static data for use with ALL tests) - if(dir==null) - { + Random random = newRandom(); BufferedReader d = new BufferedReader(new InputStreamReader(TestParser.class.getResourceAsStream("reuters21578.txt"))); - dir=new MockRAMDirectory(); - IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_24, analyzer)); + dir=newDirectory(random); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random, Version.LUCENE_24, analyzer)); String line = d.readLine(); while(line!=null) { @@ -81,7 +80,6 @@ } d.close(); writer.close(); - } reader=IndexReader.open(dir, true); searcher=new IndexSearcher(reader); @@ -94,7 +92,7 @@ protected void tearDown() throws Exception { reader.close(); searcher.close(); -// dir.close(); + dir.close(); super.tearDown(); } public void testSimpleXML() throws ParserException, IOException Index: lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java =================================================================== --- lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (revision 984759) +++ lucene/contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (working copy) @@ -2,6 +2,7 @@ import java.io.IOException; import java.util.Properties; +import java.util.Random; import java.util.StringTokenizer; import javax.xml.parsers.ParserConfigurationException; @@ -44,6 +45,7 @@ CoreParser builder; Analyzer analyzer=new MockAnalyzer(); private IndexSearcher searcher; + private MockRAMDirectory dir; //A collection of documents' field values for use in our tests String docFieldValues []= @@ -141,8 +143,9 @@ //Create an index - MockRAMDirectory dir=new MockRAMDirectory(); - IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + Random random = newRandom(); + dir=newDirectory(random); + IndexWriter w=new IndexWriter(dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer)); for (int i = 0; i < docFieldValues.length; i++) { w.addDocument(getDocumentFromString(docFieldValues[i])); @@ -160,6 +163,7 @@ @Override protected void tearDown() throws Exception { searcher.close(); + dir.close(); super.tearDown(); } } Index: lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java =================================================================== --- lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java (revision 984759) +++ lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestCartesian.java (working copy) @@ -20,6 +20,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; @@ -27,7 +28,6 @@ import org.apache.lucene.document.NumericField; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; @@ -49,7 +49,6 @@ import org.apache.lucene.spatial.tier.projections.IProjector; import org.apache.lucene.spatial.tier.projections.SinusoidalProjector; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MockRAMDirectory; import org.apache.lucene.util.LuceneTestCase; public class TestCartesian extends LuceneTestCase { @@ -71,9 +70,10 @@ @Override protected void setUp() throws 
Exception { super.setUp(); - directory = new MockRAMDirectory(); + Random random = newRandom(); + directory = newDirectory(random); - IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); setUpPlotter( 2, 15); @@ -81,7 +81,13 @@ } + @Override + protected void tearDown() throws Exception { + directory.close(); + super.tearDown(); + } + private void setUpPlotter(int base, int top) { for (; base <= top; base ++){ @@ -287,6 +293,7 @@ assertTrue(geo_distance >= lastDistance); lastDistance = geo_distance; } + searcher.close(); } public void testPoleFlipping() throws IOException, InvalidGeoException { @@ -383,6 +390,7 @@ assertTrue(geo_distance >= lastDistance); lastDistance = geo_distance; } + searcher.close(); } public void testRange() throws IOException, InvalidGeoException { @@ -477,6 +485,7 @@ lastDistance = geo_distance; } } + searcher.close(); } @@ -570,5 +579,6 @@ } } + searcher.close(); } } Index: lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java =================================================================== --- lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java (revision 984759) +++ lucene/contrib/spatial/src/test/org/apache/lucene/spatial/tier/TestDistance.java (working copy) @@ -17,13 +17,13 @@ package org.apache.lucene.spatial.tier; import java.io.IOException; +import java.util.Random; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericField; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.QueryWrapperFilter; @@ -44,8 +44,9 @@ @Override protected void setUp() throws Exception { super.setUp(); - directory = new MockRAMDirectory(); - writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())); + Random random = newRandom(); + directory = newDirectory(random); + writer = new IndexWriter(directory, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer())); addData(writer); } @@ -53,6 +54,7 @@ @Override protected void tearDown() throws Exception { writer.close(); + directory.close(); super.tearDown(); } @@ -104,6 +106,7 @@ for(int i=0;i it; @Override protected void setUp() throws Exception { super.setUp(); - IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); + random = newRandom(); + store = newDirectory(random); + IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))); Document doc; @@ -77,6 +80,14 @@ writer.close(); } + @Override + protected void tearDown() throws Exception { + if (indexReader != null) + indexReader.close(); + store.close(); + super.tearDown(); + } + public void testFieldNonExistent() throws IOException { try { indexReader = IndexReader.open(store, true); @@ -187,7 +198,8 @@ } public void testSpellchecker() throws IOException { - SpellChecker sc = new SpellChecker(new MockRAMDirectory()); + Directory dir = newDirectory(random); + SpellChecker sc = new SpellChecker(dir); indexReader = IndexReader.open(store, true); 
     sc.indexDictionary(new LuceneDictionary(indexReader, "contents"));
     String[] suggestions = sc.suggestSimilar("Tam", 1);
@@ -197,6 +209,8 @@
     assertEquals(1, suggestions.length);
     assertEquals("Jerry", suggestions[0]);
     indexReader.close();
+    sc.close();
+    dir.close();
   }
 }
Index: lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java
===================================================================
--- lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (revision 984759)
+++ lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (working copy)
@@ -36,7 +36,6 @@
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.English;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -54,7 +53,7 @@
     super.setUp();
     
     //create a user index
-    userindex = new MockRAMDirectory();
+    userindex = newDirectory(random);
     IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer()));
 
@@ -67,9 +66,18 @@
     writer.close();
     searchers = Collections.synchronizedList(new ArrayList());
     // create the spellChecker
-    spellindex = new MockRAMDirectory();
+    spellindex = newDirectory(random);
     spellChecker = new SpellCheckerMock(spellindex);
   }
+  
+  @Override
+  protected void tearDown() throws Exception {
+    userindex.close();
+    if (!spellChecker.isClosed())
+      spellChecker.close();
+    spellindex.close();
+    super.tearDown();
+  }
 
   public void testBuild() throws CorruptIndexException, IOException {
@@ -99,7 +107,7 @@
     spellChecker.setAccuracy(0.5f);
     checkCommonSuggestions(r);
     checkNGramSuggestions();
-    
+    r.close();
   }
 
   private void checkCommonSuggestions(IndexReader r) throws IOException {
@@ -260,6 +268,7 @@
     }
     assertEquals(4, searchers.size());
     assertSearchersClosed();
+    r.close();
   }
 
   /*
@@ -311,7 +320,7 @@
     // 2. and 3. during addwords
     assertEquals(iterations + 4, searchers.size());
     assertSearchersClosed();
-    
+    r.close();
   }
 
   private void assertLastSearcherOpen(int numSearchers) {
Index: lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestPlainTextDictionary.java
===================================================================
--- lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestPlainTextDictionary.java (revision 984759)
+++ lucene/contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestPlainTextDictionary.java (working copy)
@@ -33,13 +33,15 @@
     final String LF = System.getProperty("line.separator");
     String input = "oneword" + LF + "twoword" + LF + "threeword";
     PlainTextDictionary ptd = new PlainTextDictionary(new StringReader(input));
-    MockRAMDirectory ramDir = new MockRAMDirectory();
+    MockRAMDirectory ramDir = newDirectory(newRandom());
    SpellChecker spellChecker = new SpellChecker(ramDir);
     spellChecker.indexDictionary(ptd);
     String[] similar = spellChecker.suggestSimilar("treeword", 2);
     assertEquals(2, similar.length);
     assertEquals(similar[0], "threeword");
     assertEquals(similar[1], "twoword");
+    spellChecker.close();
+    ramDir.close();
   }
 }
Index: lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java
===================================================================
--- lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (revision 984759)
+++ lucene/contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (working copy)
@@ -106,7 +106,7 @@
       termField.append(randomTerm());
     }
     
-    MockRAMDirectory ramdir = new MockRAMDirectory();
+    MockRAMDirectory ramdir = newDirectory(random);
     Analyzer analyzer = randomAnalyzer();
     IndexWriter writer = new IndexWriter(ramdir,
         new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
@@ -122,6 +122,7 @@
     memory.addField("foo", fooField.toString(), analyzer);
     memory.addField("term", termField.toString(), analyzer);
     assertAllQueries(memory, ramdir, analyzer);
+    ramdir.close();
   }
   
   /**
@@ -136,6 +137,8 @@
       TopDocs memDocs = mem.search(qp.parse(query), 1);
       assertEquals(ramDocs.totalHits, memDocs.totalHits);
     }
+    ram.close();
+    mem.close();
   }
   
   /**
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (working copy)
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
@@ -44,8 +45,9 @@
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    directory = new MockRAMDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory);
+    Random random = newRandom();
+    directory = newDirectory(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory);
     
     // Add series of docs with specific information for MoreLikeThis
     addDoc(writer, "lucene");
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (working copy)
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 import java.util.HashSet;
+import java.util.Random;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -41,8 +42,9 @@
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    directory = new MockRAMDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory);
+    Random random = newRandom();
+    directory = newDirectory(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory);
     
     //Add series of docs with filterable fields : url, text and dates flags
     addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/TermsFilterTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/TermsFilterTest.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/TermsFilterTest.java (working copy)
@@ -18,6 +18,7 @@
  */
 
 import java.util.HashSet;
+import java.util.Random;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -51,8 +52,9 @@
   public void testMissingTerms() throws Exception {
     String fieldName="field1";
-    MockRAMDirectory rd=new MockRAMDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(newRandom(), rd);
+    Random random = newRandom();
+    MockRAMDirectory rd=newDirectory(random);
+    RandomIndexWriter w = new RandomIndexWriter(random, rd);
     for (int i = 0; i < 100; i++) {
       Document doc=new Document();
       int term=i*10; //terms are units of 10;
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/ChainedFilterTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/ChainedFilterTest.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/ChainedFilterTest.java (working copy)
@@ -60,7 +60,7 @@
   protected void setUp() throws Exception {
     super.setUp();
     random = newRandom();
-    directory = new MockRAMDirectory();
+    directory = newDirectory(random);
     RandomIndexWriter writer = new RandomIndexWriter(random, directory);
     Calendar cal = new GregorianCalendar();
     cal.clear();
@@ -195,7 +195,7 @@
    */
   public void testWithCachingFilter() throws Exception {
-    Directory dir = new MockRAMDirectory();
+    Directory dir = newDirectory(random);
     RandomIndexWriter writer = new RandomIndexWriter(random, dir);
     IndexReader reader = writer.getReader();
     writer.close();
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (working copy)
@@ -18,6 +18,7 @@
  */
 
 import java.io.IOException;
+import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
@@ -38,8 +39,9 @@
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    directory = new MockRAMDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    Random random = newRandom();
+    directory = newDirectory(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
     
     //Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
     addDoc(writer, "admin guest", "010", "20040101","Y");
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestRegexQuery.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestRegexQuery.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestRegexQuery.java (working copy)
@@ -17,6 +17,8 @@
  * limitations under the License.
  */
 
+import java.util.Random;
+
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.index.IndexReader;
@@ -41,8 +43,9 @@
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    directory = new MockRAMDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory);
+    Random random = newRandom();
+    directory = newDirectory(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory);
     Document doc = new Document();
     doc.add(new Field(FN, "the quick brown fox jumps over the lazy dog", Field.Store.NO, Field.Index.ANALYZED));
     writer.addDocument(doc);
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (working copy)
@@ -18,6 +18,7 @@
  */
 
 import java.io.IOException;
+import java.util.Random;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -39,13 +40,28 @@
 public class TestSpanRegexQuery extends LuceneTestCase {
-  Directory indexStoreA = new MockRAMDirectory();
-
-  Directory indexStoreB = new MockRAMDirectory();
-
+  Directory indexStoreA;
+  Directory indexStoreB;
+  Random random;
+  
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    random = newRandom();
+    indexStoreA = newDirectory(random);
+    indexStoreB = newDirectory(random);
+  }
+  
+  @Override
+  public void tearDown() throws Exception {
+    indexStoreA.close();
+    indexStoreB.close();
+    super.tearDown();
+  }
+  
   public void testSpanRegex() throws Exception {
-    MockRAMDirectory directory = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+    MockRAMDirectory directory = newDirectory(random);
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer()));
     Document doc = new Document();
     // doc.add(new Field("field", "the quick brown fox jumps over the lazy dog",
@@ -69,6 +85,8 @@
     // true);
     int numHits = searcher.search(sfq, null, 1000).totalHits;
     assertEquals(1, numHits);
+    searcher.close();
+    directory.close();
   }
 
   public void testSpanRegexBug() throws CorruptIndexException, IOException {
@@ -111,14 +129,14 @@
         Field.Index.ANALYZED_NO_NORMS));
 
     // creating first index writer
-    IndexWriter writerA = new IndexWriter(indexStoreA, new IndexWriterConfig(
+    IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
     writerA.addDocument(lDoc);
     writerA.optimize();
     writerA.close();
 
     // creating second index writer
-    IndexWriter writerB = new IndexWriter(indexStoreB, new IndexWriterConfig(
+    IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(random,
         TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
     writerB.addDocument(lDoc2);
     writerB.optimize();
Index: lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java
===================================================================
--- lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (revision 984759)
+++ lucene/contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (working copy)
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 import java.util.HashSet;
+import java.util.Random;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -39,8 +40,9 @@
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    directory = new MockRAMDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(newRandom(), directory);
+    Random random = newRandom();
+    directory = newDirectory(random);
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory);
     
     //Add series of docs with misspelt names
     addDoc(writer, "jonathon smythe","1");